diff --git a/ArtificialAutism/src/main/java/DataLayer/DataMapper.java b/ArtificialAutism/src/main/java/DataLayer/DataMapper.java index 4a8f9109..2d0b091a 100644 --- a/ArtificialAutism/src/main/java/DataLayer/DataMapper.java +++ b/ArtificialAutism/src/main/java/DataLayer/DataMapper.java @@ -5,69 +5,41 @@ */ package DataLayer; -import FunctionLayer.SimilarityMatrix; -import FunctionLayer.CustomError; -import com.google.common.collect.MapMaker; +import org.jetbrains.annotations.NotNull; + import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; -import java.sql.Statement; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentMap; +import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; /** - * * @author install1 */ public class DataMapper { - public static void createTables() throws CustomError { - Connection l_cCon = null; - PreparedStatement l_pStatement = null; - ResultSet l_rsSearch = null; - try { - l_cCon = DBCPDataSource.getConnection(); - String l_sSQL = "CREATE TABLE IF NOT EXISTS `ArtificialAutism`.`Sentences` (`Strings` text NOT NULL)"; - l_pStatement = l_cCon.prepareStatement(l_sSQL); - l_pStatement.execute(); - } catch (SQLException ex) { - throw new CustomError("failed in DataMapper " + ex.getMessage()); - } finally { - CloseConnections(l_pStatement, l_rsSearch, l_cCon); - } - } - - public static ConcurrentMap getAllStrings() throws CustomError { - ConcurrentMap allStrings = new MapMaker().concurrencyLevel(2).makeMap(); + public static ArrayList getAllStrings() throws SQLException { Connection l_cCon = null; PreparedStatement l_pStatement = null; ResultSet l_rsSearch = null; + ArrayList arrayListStr = new ArrayList(); try { l_cCon = DBCPDataSource.getConnection(); String l_sSQL = "SELECT * FROM `Sentences`"; l_pStatement = l_cCon.prepareStatement(l_sSQL); l_rsSearch = l_pStatement.executeQuery(); - int ij = 0; while (l_rsSearch.next()) { - allStrings.put(ij, l_rsSearch.getString(1)); - ij++; + arrayListStr.add(l_rsSearch.getString(1)); } - } catch (SQLException ex) { - throw new CustomError("failed in DataMapper " + ex.getMessage()); } finally { CloseConnections(l_pStatement, l_rsSearch, l_cCon); } - return allStrings; + return arrayListStr; } - public static void InsertMYSQLStrings(ConcurrentMap str) throws CustomError { + public static void InsertMYSQLStrings(ArrayList str) throws SQLException { Connection l_cCon = null; PreparedStatement l_pStatement = null; ResultSet l_rsSearch = null; @@ -75,35 +47,15 @@ public class DataMapper { try { l_cCon = DBCPDataSource.getConnection(); l_pStatement = l_cCon.prepareStatement(l_sSQL); - for (String str1 : str.values()) { - //System.out.println("adding str1: " + str1 + "\n"); + for (String str1 : str) { l_pStatement.setString(1, str1); - l_pStatement.addBatch(); + l_pStatement.execute(); } - l_pStatement.executeBatch(); - } catch (SQLException ex) { - throw new CustomError("failed in DataMapper " + ex.getMessage()); } finally { CloseConnections(l_pStatement, l_rsSearch, l_cCon); } } - public static ConcurrentMap getHLstatsMessages() { - ConcurrentMap hlStatsMessages = new MapMaker().concurrencyLevel(2).makeMap(); - try (Connection l_cCon = DBCPDataSourceHLstats.getConnection()) { - String l_sSQL = "SELECT message FROM `hlstats_Events_Chat`"; - try (PreparedStatement l_pStatement = l_cCon.prepareStatement(l_sSQL)) { - try 
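/* Reviewer note, not part of the patch: the rewritten InsertMYSQLStrings drops the
   old addBatch()/executeBatch() pattern and issues one execute() per row, costing a
   round trip per string; updateLastUsed below has the same shape. A minimal sketch
   of keeping the batch while still taking an ArrayList, assuming the same
   DBCPDataSource helper and the Sentences(Strings) schema the old createTables made:

   public static void insertStrings(java.util.List<String> rows) throws SQLException {
       String sql = "INSERT INTO `Sentences` (`Strings`) VALUES (?)";
       try (Connection con = DBCPDataSource.getConnection();
            PreparedStatement ps = con.prepareStatement(sql)) {
           for (String row : rows) {
               ps.setString(1, row);
               ps.addBatch();      // queue client-side
           }
           ps.executeBatch();      // single round trip to MySQL
       }
   }
*/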
(ResultSet l_rsSearch = l_pStatement.executeQuery()) { - while (l_rsSearch.next()) { - hlStatsMessages.put(hlStatsMessages.size() + 1, l_rsSearch.getString(1)); - } - } - } - } catch (SQLException ex) { - Logger.getLogger(DataMapper.class.getName()).log(Level.SEVERE, null, ex); - } - return hlStatsMessages; - } public static void CloseConnections(PreparedStatement ps, ResultSet rs, Connection con) { if (rs != null) { @@ -128,4 +80,41 @@ public class DataMapper { } } } + + public static void checkStringsToDelete() { + Connection l_cCon = null; + PreparedStatement l_pStatement = null; + ResultSet l_rsSearch = null; + String l_sSQL = "delete from Sentences\n" + + " where DATE(last_used) < DATE_SUB(CURDATE(), INTERVAL 32 DAY)\n" + + " order by last_used asc limit 3"; + try { + l_cCon = DBCPDataSource.getConnection(); + l_pStatement = l_cCon.prepareStatement(l_sSQL); + l_pStatement.execute(); + } catch (SQLException throwables) { + throwables.printStackTrace(); + } finally { + CloseConnections(l_pStatement, l_rsSearch, l_cCon); + } + } + + public static void updateLastUsed(@NotNull ArrayList mysqlUpdateLastUsed) { + Connection l_cCon = null; + PreparedStatement l_pStatement = null; + ResultSet l_rsSearch = null; + String l_sSQL = "update Sentences Set last_used = now() where Strings = (?)"; + try { + l_cCon = DBCPDataSource.getConnection(); + l_pStatement = l_cCon.prepareStatement(l_sSQL); + for (String str1 : mysqlUpdateLastUsed) { + l_pStatement.setString(1, str1); + l_pStatement.execute(); + } + } catch (SQLException throwables) { + throwables.printStackTrace(); + } finally { + CloseConnections(l_pStatement, l_rsSearch, l_cCon); + } + } } diff --git a/ArtificialAutism/src/main/java/FunctionLayer/CustomError.java b/ArtificialAutism/src/main/java/FunctionLayer/CustomError.java deleted file mode 100644 index a7988d55..00000000 --- a/ArtificialAutism/src/main/java/FunctionLayer/CustomError.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. - */ -package FunctionLayer; - -/** - * - * @author install1 - */ -public class CustomError extends Exception { - - public CustomError(String msg) { - super(msg); - } -} diff --git a/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.java b/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.java deleted file mode 100644 index bb61c948..00000000 --- a/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.java +++ /dev/null @@ -1,825 +0,0 @@ -/* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. 
- */ -package FunctionLayer; - -import DataLayer.DataMapper; -import FunctionLayer.StanfordParser.SentimentAnalyzerTest; -import FunctionLayer.StanfordParser.SentimentValueCache; -import com.google.common.base.Stopwatch; -import com.google.common.collect.MapMaker; -import edu.stanford.nlp.ie.AbstractSequenceClassifier; -import edu.stanford.nlp.ie.crf.CRFClassifier; -import edu.stanford.nlp.ling.CoreLabel; -import edu.stanford.nlp.parser.lexparser.LexicalizedParser; -import edu.stanford.nlp.pipeline.Annotation; -import edu.stanford.nlp.pipeline.CoreDocument; -import edu.stanford.nlp.pipeline.StanfordCoreNLP; -import edu.stanford.nlp.tagger.maxent.MaxentTagger; -import edu.stanford.nlp.trees.GrammaticalStructureFactory; -import edu.stanford.nlp.trees.TreebankLanguagePack; -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import static java.lang.Math.random; -import java.net.DatagramPacket; -import java.net.DatagramSocket; -import java.net.InetAddress; -import java.net.SocketException; -import java.sql.SQLException; -import java.util.AbstractMap; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.ListIterator; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Properties; -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.CompletionService; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorCompletionService; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.ForkJoinPool; -import java.util.concurrent.ForkJoinTask; -import java.util.concurrent.Future; -import java.util.concurrent.ThreadLocalRandom; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.function.Consumer; -import java.util.logging.Level; -import java.util.logging.Logger; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -/** - * - * @author install1 - */ -public class Datahandler { - - public static final long EXPIRE_TIME_IN_SECONDS = TimeUnit.SECONDS.convert(10, TimeUnit.MINUTES); - public static final long EXPIRE_TIME_IN_SECONDS1 = TimeUnit.SECONDS.convert(10, TimeUnit.HOURS); - public static Datahandler instance = new Datahandler(); - private static Annotation strAnno; - private static Annotation strAnnoSentiment; - private static Annotation strAnnoJMWE; - private static CoreDocument coreDoc; - private static final ConcurrentMap stringCache = new MapMaker().concurrencyLevel(6).makeMap(); - private static ConcurrentMap pipelineAnnotationCache; - private static ConcurrentMap pipelineSentimentAnnotationCache; - private static ConcurrentMap jmweAnnotationCache; - private static ConcurrentMap coreDocumentAnnotationCache; - private static ConcurrentMap sentimentCachingMap = new MapMaker().concurrencyLevel(6).makeMap(); - private LinkedHashMap> lHMSMX = new LinkedHashMap(); - private final Stopwatch stopwatch; - private static String similar = ""; - private static String shiftReduceParserPath = "edu/stanford/nlp/models/srparser/englishSR.ser.gz"; - private static String sentimentModel = "edu/stanford/nlp/models/sentiment/sentiment.ser.gz"; - private static String lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz"; - private static String taggerPath = 
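/* Note: the NER paths declared just below are the caseless models; the Kotlin
   rewrite further down swaps them for the cased variants and shortens this tagger
   path. A tagger loaded from taggerPath can be exercised directly, e.g.:

   MaxentTagger tagger = new MaxentTagger(taggerPath);
   String tagged = tagger.tagString("the quick brown fox");  // "the_DT quick_JJ ..."
*/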
"edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger"; - private static String nerModel = "edu/stanford/nlp/models/ner/english.all.3class.caseless.distsim.crf.ser.gz"; - private static String nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.caseless.distsim.crf.ser.gz"; - private static String nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.caseless.distsim.crf.ser.gz"; - private static final String customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for,if,in,into,is,it,no,not,of,on,or,such,that,the,their,then,there,these,they,this,to,was,will,with"; - private static MaxentTagger tagger; - private static String[] options = {"-maxLength", "100"}; - private static Properties props = new Properties(); - private static Properties propsSentiment = new Properties(); - private static GrammaticalStructureFactory gsf; - private static LexicalizedParser lp; - private static TreebankLanguagePack tlp; - private static AbstractSequenceClassifier classifier; - // set up Stanford CoreNLP pipeline - private static final StanfordCoreNLP pipeline = getPipeLineSetUp(); - private static StanfordCoreNLP pipelineSentiment; - - public Datahandler() { - this.stopwatch = Stopwatch.createUnstarted(); - this.jmweAnnotationCache = new MapMaker().concurrencyLevel(3).makeMap(); - this.pipelineAnnotationCache = new MapMaker().concurrencyLevel(4).makeMap(); - this.pipelineSentimentAnnotationCache = new MapMaker().concurrencyLevel(4).makeMap(); - this.coreDocumentAnnotationCache = new MapMaker().concurrencyLevel(5).makeMap(); - } - - public static StanfordCoreNLP getPipeline() { - return pipeline; - } - - private static StanfordCoreNLP getPipeLineSetUp() { - props.setProperty("annotators", "tokenize,ssplit,pos,lemma,ner,parse"); - props.setProperty("parse.model", shiftReduceParserPath); - props.setProperty("parse.maxlen", "90"); - props.setProperty("parse.binaryTrees", "true"); - props.setProperty("threads", "8"); - props.setProperty("pos.maxlen", "90"); - props.setProperty("tokenize.maxlen", "90"); - props.setProperty("ssplit.maxlen", "90"); - props.setProperty("lemma.maxlen", "90"); - props.setProperty("ner.model", nerModel + "," + nerModel2 + "," + nerModel3); - props.setProperty("ner.combinationMode", "HIGH_RECALL"); - props.setProperty("regexner.ignorecase", "true"); - props.setProperty("ner.fine.regexner.ignorecase", "true"); - props.setProperty("tokenize.options", "untokenizable=firstDelete"); - return new StanfordCoreNLP(props); - } - - public void shiftReduceParserInitiate() { - //got 8 cores - CountDownLatch cdl = new CountDownLatch(2); - new Thread(() -> { - try { - classifier = CRFClassifier.getClassifierNoExceptions(nerModel); - } catch (ClassCastException ex) { - Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); - } - cdl.countDown(); - }).start(); - new Thread(() -> { - propsSentiment.setProperty("parse.model", lexParserEnglishRNN); - propsSentiment.setProperty("sentiment.model", sentimentModel); - propsSentiment.setProperty("parse.maxlen", "90"); - propsSentiment.setProperty("threads", "8"); - propsSentiment.setProperty("pos.maxlen", "90"); - propsSentiment.setProperty("tokenize.maxlen", "90"); - propsSentiment.setProperty("ssplit.maxlen", "90"); - propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,sentiment,lemma,stopword"); //coref too expensive memorywise - propsSentiment.setProperty("customAnnotatorClass.stopword", "FunctionLayer.StopwordAnnotator"); - 
propsSentiment.setProperty(StopwordAnnotator.STOPWORDS_LIST, customStopWordList); - propsSentiment.setProperty("tokenize.options", "untokenizable=firstDelete"); - pipelineSentiment = new StanfordCoreNLP(propsSentiment); - tagger = new MaxentTagger(taggerPath); - cdl.countDown(); - }).start(); - lp = LexicalizedParser.loadModel(lexParserEnglishRNN, options); - tlp = lp.getOp().langpack(); - gsf = tlp.grammaticalStructureFactory(); - try { - cdl.await(); - } catch (InterruptedException ex) { - //System.out.println("cdl await interrupted: " + ex.getLocalizedMessage() + "\n"); - } - System.out.println("finished shiftReduceParserInitiate\n"); - } - - public static AbstractSequenceClassifier getClassifier() { - return classifier; - } - - public static void setClassifier(AbstractSequenceClassifier classifier) { - Datahandler.classifier = classifier; - } - - public void updateStringCache() { - try { - checkIfUpdateStrings(); - } catch (CustomError ex) { - Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); - } - } - - public static GrammaticalStructureFactory getGsf() { - return gsf; - } - - public static MaxentTagger getTagger() { - return tagger; - } - - private Map getCache() throws SQLException, IOException, CustomError { - return DataMapper.getAllStrings(); - } - - public int getlHMSMXSize() { - return lHMSMX.size(); - } - - public int getstringCacheSize() { - return stringCache.size(); - } - - public void initiateMYSQL() throws SQLException, IOException { - try { - DataMapper.createTables(); - stringCache.putAll(getCache()); - // lHMSMX = DataMapper.getAllRelationScores(); - } catch (CustomError ex) { - Logger.getLogger(Datahandler.class - .getName()).log(Level.SEVERE, null, ex); - } - } - - public void addHLstatsMessages() { - ConcurrentMap hlStatsMessages = new MapMaker().concurrencyLevel(2).makeMap(); - ConcurrentMap strCacheLocal = stringCache; - Collection strs = DataMapper.getHLstatsMessages().values(); - for (String str : strs) { - if (hlStatsMessages.get(str) == null) { - hlStatsMessages.put(str, hlStatsMessages.size()); - } - } - int capacity = 150; - hlStatsMessages.keySet().forEach(str -> { - if (!str.startsWith("!") && MessageResponseHandler.getStr().values().size() < capacity) { - String orElse = strCacheLocal.values().parallelStream().filter(e -> e.equals(str)).findAny().orElse(null); - if (orElse == null) { - MessageResponseHandler.getMessage(str); - } - } - }); - } - - public void instantiateAnnotationMapJMWE() { - if (!stringCache.isEmpty()) { - ConcurrentMap jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(stringCache.values()); - for (Entry entries : jmweAnnotation.entrySet()) { - jmweAnnotationCache.put(entries.getKey(), entries.getValue()); - } - } - } - - public void instantiateAnnotationMap() { - if (!stringCache.isEmpty()) { - ConcurrentMap Annotationspipeline = new MapMaker().concurrencyLevel(2).makeMap(); - ConcurrentMap AnnotationspipelineSentiment = new MapMaker().concurrencyLevel(2).makeMap(); - stringCache.values().parallelStream().forEach(str -> { - Annotation strAnno = new Annotation(str); - strAnno.compact(); - Annotationspipeline.put(str, strAnno); - Annotation strAnno2 = new Annotation(str); - strAnno2.compact(); - AnnotationspipelineSentiment.put(str, strAnno2); - }); - ConcurrentMap coreDocumentpipelineMap = getMultipleCoreDocumentsWaySuggestion(stringCache.values(), pipeline); - pipeline.annotate(Annotationspipeline.values()); - pipelineSentiment.annotate(AnnotationspipelineSentiment.values()); - 
Annotationspipeline.entrySet().forEach(pipelineEntry -> { - //relatively experimental change - pipelineEntry.getValue().compact(); - pipelineAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue()); - }); - AnnotationspipelineSentiment.entrySet().forEach(pipelineEntry -> { - pipelineEntry.getValue().compact(); - pipelineSentimentAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue()); - }); - coreDocumentpipelineMap.entrySet().stream().forEach(CD -> { - coreDocumentAnnotationCache.put(CD.getKey(), CD.getValue()); - }); - } - } - - private ConcurrentMap futuresReturnOverallEvaluation(List similarityMatrixes) { - ConcurrentMap strmapreturn = new MapMaker().concurrencyLevel(6).makeMap(); - if (!similarityMatrixes.isEmpty()) { - int iterator = 0; - for (SimilarityMatrix SMX : similarityMatrixes) { - final Double scoreRelationNewMsgToRecentMsg = SMX.getDistance(); - if (scoreRelationNewMsgToRecentMsg > 0.0) { - strmapreturn = addSMXToMapReturn(strmapreturn, SMX); - } - //System.out.println("scoreRelationNewMsgToRecentMsg: " + scoreRelationNewMsgToRecentMsg + "\niterator: " + iterator); - iterator++; - } - } - return strmapreturn; - } - - private ConcurrentMap addSMXToMapReturn(ConcurrentMap strmapreturn, SimilarityMatrix SMX) { - if (!strmapreturn.containsValue(SMX.getPrimaryString())) { - strmapreturn.put(strmapreturn.size(), SMX.getPrimaryString()); - String transmittedStr = SMX.getSecondaryString(); - SentimentValueCache cacheValue1 = SMX.getCacheValue1(); - SentimentValueCache cacheValue2 = SMX.getCacheValue2(); - if (cacheValue1 != null && !sentimentCachingMap.keySet().contains(SMX.getPrimaryString())) { - sentimentCachingMap.put(SMX.getSecondaryString(), SMX.getCacheValue1()); - } - if (cacheValue2 != null && !sentimentCachingMap.keySet().contains(transmittedStr)) { - sentimentCachingMap.put(transmittedStr, SMX.getCacheValue2()); - } - } - return strmapreturn; - } - - private List StrComparringNoSentenceRelationMap( - ConcurrentMap strCacheLocal, Collection strCollection, ConcurrentMap localJMWEMap, - ConcurrentMap localPipelineAnnotation, ConcurrentMap localPipelineSentimentAnnotation, - ConcurrentMap localCoreDocumentMap) { - ExecutorService threadPool = Executors.newCachedThreadPool(); - CompletionService ecs = new ExecutorCompletionService<>(threadPool); - int index = 0; - int prefix_size = 150; - SentimentValueCache sentimentCacheStr = sentimentCachingMap.getOrDefault(strCollection, null); - List smxReturnList = new ArrayList(); - if (strCacheLocal.size() < prefix_size) - { - for (String colStr : strCollection) - { - strCacheLocal.put(strCacheLocal.size(), colStr); - } - } - - - - for (String str1 : strCollection) { - for (String str : strCollection) { - if (!str.equals(str1)) { - SimilarityMatrix SMXInit = new SimilarityMatrix(str, str1); - SentimentValueCache sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null); - Callable worker; - if (stringCache.size() < prefix_size) { - worker = new SentimentAnalyzerTest(str, str1, SMXInit, - localJMWEMap.get(str), localJMWEMap.get(str1), localPipelineAnnotation.get(str), - localPipelineAnnotation.get(str1), localPipelineSentimentAnnotation.get(str), - localPipelineSentimentAnnotation.get(str1), localCoreDocumentMap.get(str), localCoreDocumentMap.get(str1), sentimentCacheStr, sentimentCacheStr1); - } else { - worker = new SentimentAnalyzerTest(str, str1, SMXInit, - localJMWEMap.get(str), jmweAnnotationCache.get(str1), localPipelineAnnotation.get(str), - pipelineAnnotationCache.get(str1), 
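/* While stringCache holds fewer than prefix_size strings, both sides of each pair
   are annotated from the freshly built local maps (branch above); past that point
   the new string keeps its local annotations and str1 falls back to the long-lived
   jmwe/pipeline/coreDocument caches, so known sentences are never re-annotated. */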
localPipelineSentimentAnnotation.get(str), - pipelineSentimentAnnotationCache.get(str1), localCoreDocumentMap.get(str), coreDocumentAnnotationCache.get(str1), sentimentCacheStr, sentimentCacheStr1); - } - ecs.submit(worker); - index++; - if (index % 1000 == 0 && index > 0) { - for (int i = 0; i < index; i++) { - try { - Future take = ecs.take(); - SimilarityMatrix smx = take.get(); - if (smx != null) { - smxReturnList.add(smx); - } - } catch (InterruptedException | ExecutionException ex) { - // - } - } - index = 0; - //System.out.println("smxReturnList size iterating ECS.take(): " + smxReturnList.size()); - } - } - } - } - double distance_requirement = 15500.0; - for (int i = 0; i < index; i++) { - try { - Future take = ecs.take(); - SimilarityMatrix smx = take.get(); - - if (smx != null && smx.getDistance() > distance_requirement) { - smxReturnList.add(smx); - } - } catch (InterruptedException | ExecutionException ex) { - // - } - } - //System.out.println("smxReturnList size: " + smxReturnList.size()); - threadPool.shutdown(); - return smxReturnList; - } - - private ConcurrentMap stringIteratorComparator(ConcurrentMap strmap, - ConcurrentMap strCacheLocal, ConcurrentMap localJMWEMap, - ConcurrentMap localPipelineAnnotation, ConcurrentMap localPipelineSentimentAnnotation, - ConcurrentMap localCoreDocumentMap) { - //System.out.println("strmap siuze: " + strmap.size()); - List StrComparringNoSentenceRelationMap = StrComparringNoSentenceRelationMap(strCacheLocal, strmap.values(), - localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap); - Collections.sort(StrComparringNoSentenceRelationMap, (e1, e2) -> e1.getPrimaryString().compareTo(e2.getPrimaryString())); - ConcurrentMap strmapreturn = futuresReturnOverallEvaluation(StrComparringNoSentenceRelationMap); - //System.out.println("strmapreturn size: " + strmapreturn.size()); - return strmapreturn; - } - - private ConcurrentMap removeNonSensicalStrings(ConcurrentMap strmap) { - final ConcurrentMap strCacheLocal = stringCache; - final ConcurrentMap localJMWEMap = getMultipleJMWEAnnotation(strmap.values()); - final ConcurrentMap localPipelineAnnotation = getMultiplePipelineAnnotation(strmap.values()); - final ConcurrentMap localPipelineSentimentAnnotation = getMultiplePipelineSentimentAnnotation(strmap.values()); - final ConcurrentMap localCoreDocumentMap = getMultipleCoreDocumentsWaySuggestion(strmap.values(), pipeline); - return stringIteratorComparator(strmap, strCacheLocal, localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap); - } - - public synchronized void checkIfUpdateStrings() throws CustomError { - if (stopwatch.elapsed(TimeUnit.SECONDS) >= EXPIRE_TIME_IN_SECONDS || !stopwatch.isRunning()) { - ConcurrentMap str = MessageResponseHandler.getStr(); - System.out.println("str size: " + str.size()); - str = filterContent(str); - str = removeNonSensicalStrings(str); - //System.out.println("removeNonSensicalStrings str size POST: " + str.size() + "\n"); - str = annotationCacheUpdate(str); - System.out.println("annotationCacheUpdate str size POST: " + str.size() + "\n"); - ConcurrentMap strf = str; - if (!stringCache.isEmpty()) { - new Thread(() -> { - try { - DataMapper.InsertMYSQLStrings(strf); - } catch (CustomError ex) { - Logger.getLogger(Datahandler.class - .getName()).log(Level.SEVERE, null, ex); - } - MessageResponseHandler.setStr(new MapMaker().concurrencyLevel(6).makeMap()); - }).start(); - } else { - try { - DataMapper.InsertMYSQLStrings(strf); - } catch 
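/* Note on the completion-service drain above: the every-1000-submissions drain
   keeps every non-null SimilarityMatrix, while only the final drain applies the
   distance_requirement threshold of 15500.0, so results collected mid-loop bypass
   the distance filter. */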
(CustomError ex) { - Logger.getLogger(Datahandler.class - .getName()).log(Level.SEVERE, null, ex); - } - MessageResponseHandler.setStr(new MapMaker().concurrencyLevel(2).makeMap()); - } - if (!stopwatch.isRunning()) { - stopwatch.start(); - } else { - stopwatch.reset(); - } - } - } - - private String trimString(String str) { - str = str.trim(); - if (str.startsWith("<@")) { - str = str.substring(str.indexOf("> ") + 2); - } - return str; - } - - private String getResponseFutures(String strF) { - List values_copy = new ArrayList(stringCache.values()); - Collections.shuffle(values_copy); - double preRelationUserCounters = -155000.0; - List concurrentRelations = new ArrayList(); - for (String str1 : values_copy) { - if (!strF.equals(str1)) { - SentimentValueCache sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null); - Callable worker = new SentimentAnalyzerTest(strF, str1, new SimilarityMatrix(strF, str1), - strAnnoJMWE, jmweAnnotationCache.get(str1), strAnno, - pipelineAnnotationCache.get(str1), strAnnoSentiment, - pipelineSentimentAnnotationCache.get(str1), coreDoc, coreDocumentAnnotationCache.get(str1), null, sentimentCacheStr1); - try { - SimilarityMatrix getSMX = worker.call(); - if (getSMX != null) { - Double scoreRelationLastUserMsg = getSMX.getDistance(); - if (scoreRelationLastUserMsg > preRelationUserCounters) { - preRelationUserCounters = scoreRelationLastUserMsg; - concurrentRelations.add(getSMX.getSecondaryString()); - //System.out.println("secondary: " + getSMX.getSecondaryString() + "\nDistance: " + getSMX.getDistance() + "\n"); - //System.out.println("SUCESS concurrentRelationsMap size: " + concurrentRelations.size() + "\n"); - } - } - } catch (Exception ex) { - Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); - } - } - } - - StringBuilder SB = new StringBuilder(); - double randomLenghtPermit = strF.length() * ((Math.random() * Math.random() * Math.random()) * 5); - Collections.reverse(concurrentRelations); - if (concurrentRelations.isEmpty()) { - return "failure, preventing stuckness"; - } - String firstRelation = concurrentRelations.get(0); - for (String secondaryRelation : concurrentRelations) { - if (SB.toString().length() > randomLenghtPermit && !SB.toString().isEmpty()) { - break; - } - boolean append = appendToString(firstRelation, secondaryRelation); - if (append) { - SB.append(secondaryRelation).append(" "); - } - } - return SB.toString(); - } - - private boolean appendToString(String firstRelation, String secondaryRelation) { - if (firstRelation.equals(secondaryRelation)) { - return true; - } - Double scoreRelationStrF = getScoreRelationStrF(firstRelation, secondaryRelation); - if (scoreRelationStrF > 1900) { - return true; - } - return false; - } - - public String getResponseMsg(String str) throws CustomError { - String strF = trimString(str); - getSingularAnnotation(strF); - return getResponseFutures(strF); - } - - public void getSingularAnnotation(String str) { - strAnno = new Annotation(str); - strAnno.compact(); - pipeline.annotate(strAnno); - strAnnoSentiment = new Annotation(str); - strAnnoSentiment.compact(); - pipelineSentiment.annotate(strAnnoSentiment); - List notactualList = new ArrayList(); - notactualList.add(str); - ConcurrentMap jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(notactualList); - strAnnoJMWE = jmweAnnotation.values().iterator().next(); - strAnnoJMWE.compact(); - CoreDocument coreDocument = new CoreDocument(str); - pipeline.annotate(coreDocument); - coreDoc = coreDocument; - } - - 
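/* Note on getResponseFutures above: randomLenghtPermit scales the input length by
   the product of three uniform draws times 5. Since E[U1*U2*U3] = (1/2)^3 = 1/8,
   the response-length cap averages about 5/8 of the incoming message length while
   topping out at 5x it, which is what keeps most replies short. */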
private static ConcurrentMap getMultipleJMWEAnnotation(Collection str) { - ConcurrentMap jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(str); - return jmweAnnotation; - } - - private static ConcurrentMap getMultiplePipelineAnnotation(Collection str) { - ConcurrentMap pipelineAnnotationMap = new MapMaker().concurrencyLevel(2).makeMap(); - for (String str1 : str) { - Annotation strAnno1 = new Annotation(str1); - pipelineAnnotationMap.put(str1, strAnno1); - } - pipeline.annotate(pipelineAnnotationMap.values()); - return pipelineAnnotationMap; - } - - private static ConcurrentMap getMultiplePipelineSentimentAnnotation(Collection str) { - ConcurrentMap pipelineAnnotationMap = new MapMaker().concurrencyLevel(2).makeMap(); - for (String str1 : str) { - Annotation strAnno1 = new Annotation(str1); - pipelineAnnotationMap.put(str1, strAnno1); - } - pipelineSentiment.annotate(pipelineAnnotationMap.values()); - return pipelineAnnotationMap; - } - - private Double getScoreRelationNewMsgToRecentMsg(String str, String mostRecentMsg) { - SimilarityMatrix SMX = new SimilarityMatrix(str, mostRecentMsg); - SentimentValueCache cacheSentiment1 = sentimentCachingMap.getOrDefault(str, null); - SentimentValueCache cacheSentiment2 = sentimentCachingMap.getOrDefault(mostRecentMsg, null); - Callable worker = new SentimentAnalyzerTest(str, mostRecentMsg, SMX, - jmweAnnotationCache.get(str), jmweAnnotationCache.get(mostRecentMsg), pipelineAnnotationCache.get(str), - pipelineAnnotationCache.get(mostRecentMsg), pipelineSentimentAnnotationCache.get(str), - pipelineSentimentAnnotationCache.get(mostRecentMsg), coreDocumentAnnotationCache.get(str), - coreDocumentAnnotationCache.get(mostRecentMsg), cacheSentiment1, cacheSentiment2); - SimilarityMatrix callSMX = null; - try { - callSMX = worker.call(); - } catch (Exception ex) { - Logger.getLogger(Datahandler.class - .getName()).log(Level.SEVERE, null, ex); - } - if (callSMX != null) { - double smxDistance = callSMX.getDistance(); - return smxDistance; - } - return 0.0; - } - - private Double getScoreRelationStrF(String str, String mostRecentMsg) { - SimilarityMatrix SMX = new SimilarityMatrix(str, mostRecentMsg); - SentimentValueCache cacheSentiment1 = sentimentCachingMap.getOrDefault(str, null); - SentimentValueCache cacheSentiment2 = sentimentCachingMap.getOrDefault(mostRecentMsg, null); - Callable worker = new SentimentAnalyzerTest(str, mostRecentMsg, SMX, - strAnnoJMWE, jmweAnnotationCache.get(mostRecentMsg), strAnno, - pipelineAnnotationCache.get(mostRecentMsg), strAnnoSentiment, - pipelineSentimentAnnotationCache.get(mostRecentMsg), coreDoc, coreDocumentAnnotationCache.get(mostRecentMsg), cacheSentiment1, cacheSentiment2); - SimilarityMatrix callSMX = null; - try { - callSMX = worker.call(); - } catch (Exception ex) { - Logger.getLogger(Datahandler.class - .getName()).log(Level.SEVERE, null, ex); - } - if (callSMX != null) { - double smxDistance = callSMX.getDistance(); - return smxDistance; - } - return 0.0; - } - - public static ConcurrentMap filterContent(ConcurrentMap str) { - ConcurrentMap strlistreturn = new MapMaker().concurrencyLevel(2).makeMap(); - str.values().forEach(str1 -> { - if (!str1.isEmpty() && str1.length() > 3) { - str1 = str1.trim(); - if (str1.contains("PM*")) { - str1 = str1.substring(str1.indexOf("PM*") + 3); - } - if (str1.contains("AM*")) { - str1 = str1.substring(str1.indexOf("AM*") + 3); - } - /* - if (str1.contains("?") || str1.contains("°")) - { - if (!str1.contains("http")) - { - str1 = str1.replace("?", " 
<:wlenny:514861023002624001> "); - str1 = str1.replace("°", " <:wlenny:514861023002624001> "); - } - } - */ - if (str1.contains("(Counter-Terrorist)")) { - str1 = str1.replace("(Counter-Terrorist)", " "); - } - if (str1.contains("(Terrorist)")) { - str1 = str1.replace("(Terrorist)", " "); - } - if (str1.contains("(Spectator)")) { - str1 = str1.replace("(Spectator)", " "); - } - if (str1.contains("*DEAD*")) { - str1 = str1.replace("*DEAD*", " "); - } - if (str1.contains("{red}")) { - str1 = str1.replace("{red}", " "); - } - if (str1.contains("{orange}")) { - str1 = str1.replace("{orange}", " "); - } - if (str1.contains("{yellow}")) { - str1 = str1.replace("{yellow}", " "); - } - if (str1.contains("{green}")) { - str1 = str1.replace("{green}", " "); - } - if (str1.contains("{lightblue}")) { - str1 = str1.replace("{lightblue}", " "); - } - if (str1.contains("{blue}")) { - str1 = str1.replace("{blue}", " "); - } - if (str1.contains("{purple}")) { - str1 = str1.replace("{purple}", " "); - } - if (str1.contains("{white}")) { - str1 = str1.replace("{white}", " "); - } - if (str1.contains("{fullblue}")) { - str1 = str1.replace("{fullblue}", " "); - } - if (str1.contains("{cyan}")) { - str1 = str1.replace("{cyan}", " "); - } - if (str1.contains("{lime}")) { - str1 = str1.replace("{lime}", " "); - } - if (str1.contains("{deeppink}")) { - str1 = str1.replace("{deeppink}", " "); - } - if (str1.contains("{slategray}")) { - str1 = str1.replace("{slategray}", " "); - } - if (str1.contains("{dodgerblue}")) { - str1 = str1.replace("{dodgerblue}", " "); - } - if (str1.contains("{black}")) { - str1 = str1.replace("{black}", " "); - } - if (str1.contains("{orangered}")) { - str1 = str1.replace("{orangered}", " "); - } - if (str1.contains("{darkorchid}")) { - str1 = str1.replace("{darkorchid}", " "); - } - if (str1.contains("{pink}")) { - str1 = str1.replace("{pink}", " "); - } - if (str1.contains("{lightyellow}")) { - str1 = str1.replace("{lightyellow}", " "); - } - if (str1.contains("{chocolate}")) { - str1 = str1.replace("{chocolate}", " "); - } - if (str1.contains("{beige}")) { - str1 = str1.replace("{beige}", " "); - } - if (str1.contains("{azure}")) { - str1 = str1.replace("{azure}", " "); - } - if (str1.contains("{yellowgreen}")) { - str1 = str1.replace("{yellowgreen}", " "); - } - str1 = str1.trim(); - if (str1.length() > 2 && (!str1.startsWith("!"))) { - strlistreturn.put(strlistreturn.size(), str1); - } - } - }); - return strlistreturn; - } - - private ConcurrentMap annotationCacheUpdate(ConcurrentMap strmap) { - ConcurrentMap jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strmap.values()); - for (Entry jmweitr : jmweAnnotation.entrySet()) { - jmweAnnotationCache.put(jmweitr.getKey(), jmweitr.getValue()); - } - ConcurrentMap Annotationspipeline = new MapMaker().concurrencyLevel(4).makeMap(); - ConcurrentMap AnnotationspipelineSentiment = new MapMaker().concurrencyLevel(4).makeMap(); - ConcurrentMap coreDocumentpipelineMap = getMultipleCoreDocumentsWaySuggestion(strmap.values(), pipeline); - strmap.values().forEach(str -> { - Annotation strAnno1 = new Annotation(str); - Annotationspipeline.put(str, strAnno1); - Annotation strAnno2 = new Annotation(str); - AnnotationspipelineSentiment.put(str, strAnno2); - stringCache.put(stringCache.size() + 1, str); - }); - pipeline.annotate(Annotationspipeline.values()); - pipelineSentiment.annotate(AnnotationspipelineSentiment.values()); - Annotationspipeline.entrySet().forEach(pipelineEntry -> { - if (pipelineEntry != null) { - 
pipelineAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue()); - } - }); - AnnotationspipelineSentiment.entrySet().forEach(pipelineEntry -> { - if (pipelineEntry != null) { - pipelineSentimentAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue()); - } - }); - coreDocumentpipelineMap.entrySet().forEach(coreDocumentEntry -> { - coreDocumentAnnotationCache.put(coreDocumentEntry.getKey(), coreDocumentEntry.getValue()); - }); - return strmap; - } - - public int getMessageOverHead() { - return stringCache.values().size() - (stringCache.values().size() / 10); - } - - public void update_autismo_socket_msg() { - try { - try (DatagramSocket serverSocket = new DatagramSocket(48477)) { - try (DatagramSocket serverSocket1 = new DatagramSocket(48478)) { - byte[] receiveData = new byte[4096]; - InetAddress IPAddress = InetAddress.getByName("144.76.218.19"); - DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length); - while (true) { - serverSocket.receive(receivePacket); - String sentence = new String(receivePacket.getData(), 0, receivePacket.getLength()); - sentence = sentence.replace("clientmessage:", ""); - String getResponseMsg = getResponseMsg(sentence); - byte[] sendData = getResponseMsg.getBytes("UTF-8"); - DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, IPAddress, 48477); - serverSocket.send(sendPacket); - - receivePacket = new DatagramPacket(receiveData, receiveData.length); - serverSocket1.receive(receivePacket); - sentence = new String(receivePacket.getData(), 0, receivePacket.getLength()); - sentence = sentence.replace("clientmessage:", ""); - getResponseMsg = getResponseMsg(sentence); - sendData = getResponseMsg.getBytes("UTF-8"); - sendPacket = new DatagramPacket(sendData, sendData.length, IPAddress, 48478); - serverSocket1.send(sendPacket); - } - } - } catch (CustomError ex) { - Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); - } - } catch (SocketException ex) { - Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); - } catch (UnsupportedEncodingException ex) { - Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); - } catch (IOException ex) { - Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); - } - } - - private static class AnnotationCollector implements Consumer { - - private static int i = 0; - private List annotationsT = new ArrayList(); - - @Override - public void accept(T ann) { - //System.out.println("adding ann: " + ann.toString()); - annotationsT.add(ann); - } - } - - public static ConcurrentMap getMultipleCoreDocumentsWaySuggestion(Collection str, StanfordCoreNLP localNLP) { - AnnotationCollector annCollector = new AnnotationCollector(); - for (String exampleString : str) { - localNLP.annotate(new Annotation(exampleString), annCollector); - annCollector.i++; - //System.out.println("iterator: " + annCollector.i + "\nstr size: " + str.size() + "\n"); - } - try { - Thread.sleep(8000); - } catch (InterruptedException ex) { - Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex); - } - ConcurrentMap annotationreturnMap = new MapMaker().concurrencyLevel(6).makeMap(); - for (Annotation ann : annCollector.annotationsT) { - if (ann != null) { - ann.compact(); - CoreDocument CD = new CoreDocument(ann); - annotationreturnMap.put(CD.text(), CD); - //System.out.println("CD text:" + CD.text() + "\niterator: " + iterator + "\nsize: " + annCollector.annotationsT.size()); - } - } - return 
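/* Reviewer note: the Thread.sleep(8000) above is the only wait for the
   asynchronous annotate(annotation, callback) calls, which is racy under load.
   The same Consumer-based overload already used here allows a deterministic
   latch-based wait; a minimal sketch reusing AnnotationCollector:

   CountDownLatch latch = new CountDownLatch(str.size());
   for (String s : str) {
       localNLP.annotate(new Annotation(s), ann -> {
           annCollector.accept(ann);  // collect exactly as before
           latch.countDown();         // one tick per finished annotation
       });
   }
   latch.await();                     // replaces the fixed 8-second sleep
*/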
annotationreturnMap; - } -} diff --git a/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.kt b/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.kt new file mode 100644 index 00000000..4adce474 --- /dev/null +++ b/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.kt @@ -0,0 +1,660 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package FunctionLayer + +import DataLayer.DataMapper +import FunctionLayer.StanfordParser.SentimentAnalyzerTest +import com.google.common.base.Stopwatch +import edu.mit.jmwe.data.IMWE +import edu.mit.jmwe.data.IToken +import edu.stanford.nlp.ie.AbstractSequenceClassifier +import edu.stanford.nlp.ie.crf.CRFClassifier +import edu.stanford.nlp.ling.CoreAnnotations +import edu.stanford.nlp.ling.CoreLabel +import edu.stanford.nlp.ling.TaggedWord +import edu.stanford.nlp.parser.lexparser.LexicalizedParser +import edu.stanford.nlp.pipeline.Annotation +import edu.stanford.nlp.pipeline.CoreDocument +import edu.stanford.nlp.pipeline.StanfordCoreNLP +import edu.stanford.nlp.tagger.maxent.MaxentTagger +import edu.stanford.nlp.trees.* +import edu.stanford.nlp.util.CoreMap +import kotlinx.coroutines.* +import org.ejml.simple.SimpleMatrix +import java.util.* +import java.util.concurrent.TimeUnit +import java.util.regex.Pattern +import kotlin.collections.ArrayList +import kotlin.collections.HashMap + + +/** + * + * @author install1 + */ +public class Datahandler { + private val stopwatch: Stopwatch + private val EXPIRE_TIME_IN_MINUTES = TimeUnit.MINUTES.convert(30, TimeUnit.MINUTES) + private var pipelineAnnotationCache: HashMap + private var pipelineSentimentAnnotationCache = HashMap() + private var coreDocumentAnnotationCache: HashMap + private var jmweAnnotationCache = HashMap() + private var stringCache = ArrayList() + + //private val nerModel = "edu/stanford/nlp/models/ner/english.all.3class.caseless.distsim.crf.ser.gz" + private val nerModel = "edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz" + private var tagger: MaxentTagger = MaxentTagger() + private var gsf: GrammaticalStructureFactory + private var classifier: AbstractSequenceClassifier + + //SentimentAnalyzer Hashmaps + private var tokenizeCountingHashMap: HashMap = HashMap() + private var taggedWordListHashMap: HashMap>> = HashMap() + private var retrieveTGWListHashMap: HashMap> = + HashMap() + private var sentences1HashMap: HashMap> = HashMap() + private var sentencesSentimentHashMap: HashMap> = HashMap() + private var trees1HashMap: HashMap> = HashMap() + private var grammaticalStructureHashMap: HashMap> = + HashMap() + private var typedDependenciesHashMap: HashMap> = + HashMap() + private var rnnCoreAnnotationsPredictedHashMap: HashMap> = HashMap() + private var simpleMatricesHashMap: HashMap> = HashMap() + private var simpleMatricesNodevectorsHashMap: HashMap> = HashMap() + private var listHashMap: HashMap> = HashMap() + private var longestHashMap: HashMap = HashMap() + private var sentimentHashMap: HashMap = HashMap() + private var imwesHashMap: HashMap>> = HashMap() + private var InflectedCounterNegativeHashMap: HashMap = HashMap() + private var InflectedCounterPositiveHashMap: HashMap = HashMap() + private var tokenEntryHashMap: HashMap> = HashMap() + private var MarkedContinuousCounterHashMap: HashMap = HashMap() + private var UnmarkedPatternCounterHashMap: HashMap = HashMap() + private var strTokensIpartFormHashMap: HashMap> = 
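/* Reviewer note: these ~40 parallel HashMaps are all keyed by the same cached
   sentence; one map to a small feature holder would cut the lookup and write-back
   boilerplate in getResponseFutures below. A hypothetical sketch:

   data class SentenceFeatures(
       var tokenizeCounting: Int? = null,
       var taggedWords: List<List<TaggedWord>>? = null,
       var trees: ArrayList<Tree>? = null,
       // ...one property per feature currently held in its own map
   )
   val featureCache = HashMap<String, SentenceFeatures>()
*/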
HashMap() + private var tokenFormsHashMap: HashMap> = HashMap() + private var strTokenEntryGetPOSHashMap: HashMap> = HashMap() + private var intTokenEntyCountsHashMap: HashMap> = HashMap() + private var ITokenTagsHashMap: HashMap> = HashMap() + private var strTokenStemsHashMap: HashMap> = HashMap() + private var AnotatorcounterHashMap: HashMap = HashMap() + private var TokensCounterHashMap: HashMap = HashMap() + private var entityTokenTagsHashMap: HashMap> = HashMap() + private var nerEntitiesHashMap: HashMap> = HashMap() + private var nerEntitiesTypeHashMap: HashMap> = HashMap() + private var stopWordTokenHashMap: HashMap> = HashMap() + private var stopWordLemmaHashMap: HashMap> = HashMap() + private var PairCounterHashMap: HashMap = HashMap() + + constructor() { + stopwatch = Stopwatch.createUnstarted() + jmweAnnotationCache = HashMap() + pipelineAnnotationCache = HashMap() + pipelineSentimentAnnotationCache = HashMap() + coreDocumentAnnotationCache = HashMap() + gsf = initiateGrammaticalStructureFactory() + classifier = CRFClassifier.getClassifierNoExceptions(nerModel) + } + + fun initiateGrammaticalStructureFactory(): GrammaticalStructureFactory { + val options = arrayOf("-maxLength", "100") + //val lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz" + val lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz" + val lp = LexicalizedParser.loadModel(lexParserEnglishPCFG, *options) + val tlp = lp.getOp().langpack() + return tlp.grammaticalStructureFactory() + } + + public fun pipeLineSetUp(): StanfordCoreNLP { + val props = Properties() + val shiftReduceParserPath = "edu/stanford/nlp/models/srparser/englishSR.ser.gz" + //val nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.caseless.distsim.crf.ser.gz" + val nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.distsim.crf.ser.gz" + //val nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.caseless.distsim.crf.ser.gz" + val nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.distsim.crf.ser.gz" + props.setProperty("annotators", "tokenize,ssplit,pos,lemma,ner,parse") + props.setProperty("parse.model", shiftReduceParserPath) + props.setProperty("parse.maxlen", "90") + props.setProperty("parse.binaryTrees", "true") + props.setProperty("threads", "5") + props.setProperty("pos.maxlen", "90") + props.setProperty("tokenize.maxlen", "90") + props.setProperty("ssplit.maxlen", "90") + props.setProperty("lemma.maxlen", "90") + props.setProperty("ner.model", "$nerModel,$nerModel2,$nerModel3") + props.setProperty("ner.combinationMode", "HIGH_RECALL") + props.setProperty("regexner.ignorecase", "true") + props.setProperty("ner.fine.regexner.ignorecase", "true") + props.setProperty("tokenize.options", "untokenizable=firstKeep") + return StanfordCoreNLP(props) + } + + fun shiftReduceParserInitiate(): StanfordCoreNLP { + val propsSentiment = Properties() + //val lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz" + val lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz" + val sentimentModel = "edu/stanford/nlp/models/sentiment/sentiment.ser.gz" + //val taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger" + val taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words-distsim.tagger" + val customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for,if,in,into,is,it,no,not,of," + + 
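/* This is Lucene's classic English stop-word set with "start", "starts", "period"
   and "periods" prepended; StopwordAnnotator, registered below via
   customAnnotatorClass.stopword, reads it from the STOPWORDS_LIST property. */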
"on,or,such,that,the,their,then,there,these,they,this,to,was,will,with" + propsSentiment.setProperty("parse.model", lexParserEnglishPCFG) + propsSentiment.setProperty("sentiment.model", sentimentModel) + propsSentiment.setProperty("parse.maxlen", "90") + propsSentiment.setProperty("threads", "5") + propsSentiment.setProperty("pos.maxlen", "90") + propsSentiment.setProperty("tokenize.maxlen", "90") + propsSentiment.setProperty("ssplit.maxlen", "90") + propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,sentiment,lemma,stopword") //coref too expensive memorywise + propsSentiment.setProperty("customAnnotatorClass.stopword", "FunctionLayer.StopwordAnnotator") + propsSentiment.setProperty(StopwordAnnotator.STOPWORDS_LIST, customStopWordList) + propsSentiment.setProperty("tokenize.options", "untokenizable=firstKeep") + tagger = MaxentTagger(taggerPath) + + println("finished shiftReduceParserInitiate\n") + return StanfordCoreNLP(propsSentiment) + } + + fun updateStringCache() { + if (stopwatch.elapsed(TimeUnit.MINUTES) >= EXPIRE_TIME_IN_MINUTES || !stopwatch.isRunning) { + if (!stopwatch.isRunning) { + stopwatch.start() + } else { + stopwatch.reset() + } + stringCache.sortWith(Comparator.comparingInt(String::length).reversed()); + System.out.println("pre InsertMYSQLStrings") + val arrayList = java.util.ArrayList(stringCache) + DataMapper.InsertMYSQLStrings(arrayList) + DataMapper.checkStringsToDelete(); + stringCache = ArrayList(); + initiateMYSQL(); + } + } + + fun initiateMYSQL() { + stringCache.addAll(DataMapper.getAllStrings()) + } + + private fun trimString(str: String): String { + var message = str.trim { it <= ' ' } + if (message.startsWith("<@")) { + message = message.substring(message.indexOf("> ") + 2) + } + if (!message.isEmpty()) { + message = message.replace("@", "") + if (message.contains("<>")) { + message = message.substring(message.indexOf(">")) + } + if (message.startsWith("[ *")) { + message = message.substring(message.indexOf("]")) + } + } + return message + } + + private fun createStrAnnotation(str: String, stanfordCoreNLP: StanfordCoreNLP, sentimentBool: Boolean) { + val strAnno2 = Annotation(str) + strAnno2.compact() + stanfordCoreNLP.annotate(strAnno2) + if (sentimentBool) { + pipelineSentimentAnnotationCache.put(str, strAnno2) + } else { + pipelineAnnotationCache.put(str, strAnno2) + } + } + + private fun getResponseFutures(strF: String, stanfordCoreNLP: StanfordCoreNLP, stanfordCoreNLPSentiment: StanfordCoreNLP): String { + val strAnno: Annotation = Annotation(strF) + strAnno.compact() + stanfordCoreNLP.annotate(strAnno) + + val strAnnoSentiment: Annotation = Annotation(strF) + strAnnoSentiment.compact() + stanfordCoreNLPSentiment.annotate(strAnnoSentiment) + + + val annotation = Annotation(strF) + stanfordCoreNLP.annotate(annotation) + val coreDocument = CoreDocument(annotation) + + val values_copy: List = ArrayList(stringCache) + var preRelationUserCounters = -155000.0 + val concurrentRelations: MutableList = arrayListOf() + val SB = StringBuilder() + var jmweAnnotationF = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strF) + var tokenizeCountingF: Int? = null + var taggedWordListF: List>? = null + var retrieveTGWListF: java.util.ArrayList? = null + var sentencesF: List? = null + var sentencesSentimentF: List? = null + var coreMaps1: List = jmweAnnotationF.get(CoreAnnotations.SentencesAnnotation::class.java) + var treesF: java.util.ArrayList? = null + var grammaticalStructuresF: ArrayList? = null + var typedDependenciesF: java.util.ArrayList? 
= null + var rnnCoreAnnotationsPredictedF: java.util.ArrayList? = null + var simpleMatricesF: java.util.ArrayList? = null + var simpleMatricesNodevectorsF: java.util.ArrayList? = null + var listF: MutableList? = null + var longestF: Int? = null + var sentimentLongestF: Int? = null + var imwesF: List>? = null + var InflectedCounterNegativeF: Int? = null + var InflectedCounterPositiveF: Int? = null + var tokenEntryF: ArrayList? = null + var MarkedContinuousCounterF: Int? = null + var UnmarkedPatternCounterF: Int? = null + var strTokensIpartFormF: ArrayList? = null + var tokenFormsF: java.util.ArrayList? = null + var strTokenEntryGetPOSF: ArrayList? = null + var intTokenEntyCountsF: java.util.ArrayList? = null + var ITokenTagsF: ArrayList? = null + var strTokenStemsF: java.util.ArrayList? = null + var AnotatorcounterF: Int? = null + var TokensCounterF: Int? = null + var entityTokenTagsF: java.util.ArrayList? = null + var nerEntitiesF: java.util.ArrayList? = null + var nerEntitiesTypeF: java.util.ArrayList? = null + var stopWordTokenF: java.util.ArrayList? = null + var stopWordLemmaF: java.util.ArrayList? = null + var PairCounterF: Int? = null + for (str1 in values_copy) { + if (strF != str1) { + val annotation2 = pipelineSentimentAnnotationCache.getOrDefault(str1, null) + val annotation4 = pipelineAnnotationCache.getOrDefault(str1, null) + val coreDocument1 = coreDocumentAnnotationCache.getOrDefault(str1, null) + var jmweAnnotation = jmweAnnotationCache.getOrDefault(str1, null) + if (annotation2 == null) { + createStrAnnotation(str1, stanfordCoreNLPSentiment, true) + } + if (annotation4 == null) { + createStrAnnotation(str1, stanfordCoreNLP, false) + } + if (coreDocument1 == null) { + getCoreDocumentsSuggested(stanfordCoreNLP, str1) + } + if (jmweAnnotation == null) { + getJMWEAnnotation(str1) + jmweAnnotation = jmweAnnotationCache.get(str1) + } + val tokenizeCounting: Int? = tokenizeCountingHashMap.getOrDefault(str1, null) + val taggedWordList1: List>? = taggedWordListHashMap.getOrDefault(str1, null) + val retrieveTGWList1: java.util.ArrayList? = retrieveTGWListHashMap.getOrDefault(str1, null) + val sentence1: List? = sentences1HashMap.getOrDefault(str1, null) + val sentenceSentiment1: List? 
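/* Each getOrDefault miss in this block is computed once inside
   SentimentAnalyzerTest and written back through the matching null-check after
   the call, so every lookup hits from the second comparison of str1 onward. */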
= sentencesSentimentHashMap.getOrDefault(str1, null) + val trees1 = trees1HashMap.getOrDefault(str1, null) + var coreMaps2: List = listOf() + val grammaticalStructures1 = grammaticalStructureHashMap.getOrDefault( + str1, null) + if (jmweAnnotation != null) { + coreMaps2 = jmweAnnotation.get(CoreAnnotations.SentencesAnnotation::class.java) + } + val typedDependencies1 = typedDependenciesHashMap.getOrDefault(str1, null) + val rnnCoreAnnotationsPredicted1 = rnnCoreAnnotationsPredictedHashMap.getOrDefault(str1, null) + val simpleMatrices1 = simpleMatricesHashMap.getOrDefault(str1, null); + val simpleMatricesNodevectors1 = simpleMatricesNodevectorsHashMap.getOrDefault(str1, null); + val list1 = listHashMap.getOrDefault(str1, null); + val longest1 = longestHashMap.getOrDefault(str1, null); + val sentimentLongest1 = sentimentHashMap.getOrDefault(str1, null); + val imwes1 = imwesHashMap.getOrDefault(str1, null); + val InflectedCounterNegative1 = InflectedCounterNegativeHashMap.getOrDefault(str1, null); + val InflectedCounterPositive1 = InflectedCounterPositiveHashMap.getOrDefault(str1, null) + val tokenEntry1 = tokenEntryHashMap.getOrDefault(str1, null) + val MarkedContinuousCounter1 = MarkedContinuousCounterHashMap.getOrDefault(str1, null) + val UnmarkedPatternCounter1 = UnmarkedPatternCounterHashMap.getOrDefault(str1, null) + val strTokensIpartForm1 = strTokensIpartFormHashMap.getOrDefault(str1, null); + val tokenForms1 = tokenFormsHashMap.getOrDefault(str1, null); + val strTokenEntryGetPOS1 = strTokenEntryGetPOSHashMap.getOrDefault(str1, null) + val intTokenEntyCounts1 = intTokenEntyCountsHashMap.getOrDefault(str1, null); + val ITokenTags1 = ITokenTagsHashMap.getOrDefault(str1, null); + val strTokenStems1 = strTokenStemsHashMap.getOrDefault(str1, null); + val Anotatorcounter1 = AnotatorcounterHashMap.getOrDefault(str1, null); + val TokensCounter1 = TokensCounterHashMap.getOrDefault(str1, null); + val entityTokenTags1 = entityTokenTagsHashMap.getOrDefault(str1, null); + val nerEntities1 = nerEntitiesHashMap.getOrDefault(str1, null); + val nerEntitiesType1 = nerEntitiesTypeHashMap.getOrDefault(str1, null); + val stopWordToken1 = stopWordTokenHashMap.getOrDefault(str1, null); + val stopWordLemma1 = stopWordLemmaHashMap.getOrDefault(str1, null); + val PairCounter1 = PairCounterHashMap.getOrDefault(str1, null); + + var SMX = SentimentAnalyzerTest(strF, str1, SimilarityMatrix(strF, str1), + coreMaps1, coreMaps2, strAnno, + pipelineAnnotationCache[str1], strAnnoSentiment, + pipelineSentimentAnnotationCache[str1], coreDocument, coreDocumentAnnotationCache[str1], + tagger, gsf, classifier, tokenizeCounting, tokenizeCountingF, + taggedWordListF, taggedWordList1, retrieveTGWListF, retrieveTGWList1, + sentencesF, sentence1, sentencesSentimentF, sentenceSentiment1, treesF, trees1, + grammaticalStructuresF, grammaticalStructures1, typedDependenciesF, + typedDependencies1, rnnCoreAnnotationsPredictedF, rnnCoreAnnotationsPredicted1, + simpleMatricesF, simpleMatrices1, simpleMatricesNodevectorsF, simpleMatricesNodevectors1, + listF, list1, longestF, longest1, sentimentLongestF, sentimentLongest1, imwesF, + imwes1, InflectedCounterNegativeF, InflectedCounterNegative1, InflectedCounterPositiveF, + InflectedCounterPositive1, tokenEntryF, tokenEntry1, MarkedContinuousCounterF, + MarkedContinuousCounter1, UnmarkedPatternCounterF, UnmarkedPatternCounter1, + strTokensIpartFormF, strTokensIpartForm1, tokenFormsF, tokenForms1, + strTokenEntryGetPOSF, strTokenEntryGetPOS1, intTokenEntyCountsF, + intTokenEntyCounts1, 
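/* SentimentAnalyzerTest receives every cached-or-null feature pair positionally
   (roughly 80 parameters); the SentenceFeatures sketch above shows one way to
   collapse each pair into a single cache-object argument. */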
ITokenTagsF, ITokenTags1, strTokenStemsF, strTokenStems1, + AnotatorcounterF, Anotatorcounter1, TokensCounterF, TokensCounter1, + entityTokenTagsF, entityTokenTags1, nerEntitiesF, nerEntities1, nerEntitiesTypeF, + nerEntitiesType1, stopWordTokenF, stopWordToken1, stopWordLemmaF, stopWordLemma1, + PairCounterF, PairCounter1) + if (tokenizeCounting == null) { + tokenizeCountingHashMap.put(str1, SMX.getTokenizeCounting()) + } + if (taggedWordList1 == null) { + taggedWordListHashMap.put(str1, SMX.getTaggedWordList1()) + } + if (tokenizeCountingF == null) { + tokenizeCountingF = SMX.getTokenizeCountingF(); + } + if (taggedWordListF == null) { + taggedWordListF = SMX.getTaggedWordListF(); + } + if (retrieveTGWListF == null) { + retrieveTGWListF = SMX.getRetrieveTGWListF(); + } + if (retrieveTGWList1 == null) { + retrieveTGWListHashMap.put(str1, SMX.getRetrieveTGWList1()); + } + if (sentencesF == null) { + sentencesF = SMX.getSentencesF(); + } + if (sentence1 == null) { + sentences1HashMap.put(str1, SMX.getSentences1()) + } + if (sentencesSentimentF == null) { + sentencesSentimentF = SMX.getSentencesSentimentF(); + } + if (sentenceSentiment1 == null) { + sentencesSentimentHashMap.put(str1, SMX.getSentencesSentiment1()); + } + if (treesF == null) { + treesF = SMX.getTreesF(); + } + if (trees1 == null) { + trees1HashMap.put(str1, SMX.getTrees1()) + } + if (grammaticalStructuresF == null) { + grammaticalStructuresF = SMX.getGrammaticalStructuresF(); + } + if (grammaticalStructures1 == null) { + grammaticalStructureHashMap.put(str1, SMX.getGrammaticalStructures1()) + } + if (typedDependenciesF == null) { + typedDependenciesF = SMX.getTypedDependenciesF(); + } + if (typedDependencies1 == null) { + typedDependenciesHashMap.put(str1, SMX.getTypedDependencies1()) + } + if (rnnCoreAnnotationsPredictedF == null) { + rnnCoreAnnotationsPredictedF = SMX.getRnnCoreAnnotationsPredictedF() + } + if (rnnCoreAnnotationsPredicted1 == null) { + rnnCoreAnnotationsPredictedHashMap.put(str1, SMX.getRnnCoreAnnotationsPredicted1()) + } + if (simpleMatricesF == null) { + simpleMatricesF = SMX.getSimpleMatricesF(); + } + if (simpleMatrices1 == null) { + simpleMatricesHashMap.put(str1, SMX.getSimpleMatrices1()); + } + if (simpleMatricesNodevectorsF == null) { + simpleMatricesNodevectorsF = SMX.getSimpleMatricesNodevectorsF(); + } + if (simpleMatricesNodevectors1 == null) { + simpleMatricesNodevectorsHashMap.put(str1, SMX.getSimpleMatricesNodevectors1()); + } + if (listF == null) { + listF = SMX.getListF(); + } + if (list1 == null) { + listHashMap.put(str1, SMX.getList1()); + } + if (longestF == null) { + longestF = SMX.getLongestF(); + } + if (longest1 == null) { + longestHashMap.put(str1, SMX.getLongest1()); + } + if (sentimentLongestF == null) { + sentimentLongestF = SMX.getSentimentLongestF(); + } + if (sentimentLongest1 == null) { + sentimentHashMap.put(str1, SMX.getSentimentLongest1()); + } + if (imwesF == null) { + imwesF = SMX.getImwesF(); + } + if (imwes1 == null) { + imwesHashMap.put(str1, SMX.getImwes1()); + } + if (InflectedCounterNegativeF == null) { + InflectedCounterNegativeF = SMX.getInflectedCounterNegativeF(); + } + if (InflectedCounterNegative1 == null) { + InflectedCounterNegativeHashMap.put(str1, SMX.getInflectedCounterNegative1()); + } + if (InflectedCounterPositiveF == null) { + InflectedCounterPositiveF = SMX.getInflectedCounterPositiveF(); + } + if (InflectedCounterPositive1 == null) { + InflectedCounterPositiveHashMap.put(str1, SMX.getInflectedCounterPositive1()); + } + if (tokenEntryF == null) { + 
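/* Hypothetical helper that would collapse the write-back cascade below, assuming
   the SMX getters stay as they are:

   fun <T> memo(map: HashMap<String, T>, key: String, cur: T?, fresh: () -> T): T =
       cur ?: fresh().also { map[key] = it }

   // e.g.: treesF = treesF ?: SMX.getTreesF()
   //       memo(trees1HashMap, str1, trees1) { SMX.getTrees1() }
*/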
tokenEntryF = SMX.getTokenEntryF(); + } + if (tokenEntry1 == null) { + tokenEntryHashMap.put(str1, SMX.getTokenEntry1()) + } + if (MarkedContinuousCounterF == null) { + MarkedContinuousCounterF = SMX.getMarkedContinuousCounterF(); + } + if (MarkedContinuousCounter1 == null) { + MarkedContinuousCounterHashMap.put(str1, SMX.getMarkedContinuousCounter1()); + } + if (UnmarkedPatternCounterF == null) { + UnmarkedPatternCounterF = SMX.getUnmarkedPatternCounterF(); + } + if (UnmarkedPatternCounter1 == null) { + UnmarkedPatternCounterHashMap.put(str1, SMX.getUnmarkedPatternCounter1()); + } + if (strTokensIpartFormF == null) { + strTokensIpartFormF = SMX.getStrTokensIpartFormF(); + } + if (strTokensIpartForm1 == null) { + strTokensIpartFormHashMap.put(str1, SMX.getStrTokensIpartForm1()); + } + if (tokenFormsF == null) { + tokenFormsF = SMX.getTokenFormsF(); + } + if (tokenForms1 == null) { + tokenFormsHashMap.put(str1, SMX.getTokenForms1()); + } + if (strTokenEntryGetPOSF == null) { + strTokenEntryGetPOSF = SMX.getStrTokenEntryGetPOSF(); + } + if (strTokenEntryGetPOS1 == null) { + strTokenEntryGetPOSHashMap.put(str1, SMX.getStrTokenEntryGetPOS1()) + } + if (intTokenEntyCountsF == null) { + intTokenEntyCountsF = SMX.getIntTokenEntyCountsF(); + } + if (intTokenEntyCounts1 == null) { + intTokenEntyCountsHashMap.put(str1, SMX.getIntTokenEntyCounts1()); + } + if (ITokenTagsF == null) { + ITokenTagsF = SMX.getITokenTagsF(); + } + if (ITokenTags1 == null) { + ITokenTagsHashMap.put(str1, SMX.getITokenTags1()); + } + if (strTokenStemsF == null) { + strTokenStemsF = SMX.getStrTokenStemsF(); + } + if (strTokenStems1 == null) { + strTokenStemsHashMap.put(str1, SMX.getStrTokenStems1()); + } + if (AnotatorcounterF == null) { + AnotatorcounterF = SMX.getAnotatorcounterF(); + } + if (Anotatorcounter1 == null) { + AnotatorcounterHashMap.put(str1, SMX.getAnotatorcounter1()); + } + if (TokensCounterF == null) { + TokensCounterF = SMX.getTokensCounterF(); + } + if (TokensCounter1 == null) { + TokensCounterHashMap.put(str1, SMX.getTokensCounter1()); + } + if (entityTokenTagsF == null) { + entityTokenTagsF = SMX.getEntityTokenTagsF(); + } + if (entityTokenTags1 == null) { + entityTokenTagsHashMap.put(str1, SMX.getEntityTokenTags1()); + } + if (nerEntitiesF == null) { + nerEntitiesF = SMX.getNerEntitiesF(); + } + if (nerEntities1 == null) { + nerEntitiesHashMap.put(str1, SMX.getNerEntities1()); + } + if (nerEntitiesTypeF == null) { + nerEntitiesTypeF = SMX.getNerEntitiesTypeF(); + } + if (nerEntitiesType1 == null) { + nerEntitiesTypeHashMap.put(str1, SMX.getNerEntitiesType1()); + } + if (stopWordTokenF == null) { + stopWordTokenF = SMX.getStopWordTokenF(); + } + if (stopWordToken1 == null) { + stopWordTokenHashMap.put(str1, SMX.getStopWordToken1()); + } + if (stopWordLemmaF == null) { + stopWordLemmaF = SMX.getStopWordLemmaF(); + } + if (stopWordLemma1 == null) { + stopWordLemmaHashMap.put(str1, SMX.getStopWordLemma1()); + } + if (PairCounterF == null) { + PairCounterF = SMX.getPairCounterF(); + } + if (PairCounter1 == null) { + PairCounterHashMap.put(str1, SMX.getPairCounter1()); + } + + var getSMX: SimilarityMatrix = SMX.callSMX() + val scoreRelationLastUserMsg = getSMX.distance + if (scoreRelationLastUserMsg > preRelationUserCounters) { + preRelationUserCounters = scoreRelationLastUserMsg + concurrentRelations.add(getSMX.secondaryString) + } + } + } + val cacheRequirement = 6500; + if (preRelationUserCounters > cacheRequirement && !stringCache.contains(strF) && filterContent(strF)) { + stringCache.add(strF) + } + val 
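The tail of the loop above keeps a running maximum of the SimilarityMatrix distance and appends every candidate that beats it, so concurrentRelations ends with the strongest matches last (the later Collections.reverse puts them first). A Java restatement under illustrative names; Candidate stands in for the SMX result and is not a project type:

    import java.util.ArrayList;
    import java.util.List;

    final class Candidate {
        final String secondaryString;
        final double distance;

        Candidate(String secondaryString, double distance) {
            this.secondaryString = secondaryString;
            this.distance = distance;
        }
    }

    class BestRelations {
        // Each candidate that raises the running maximum is kept; weaker
        // candidates arriving after a strong one are skipped entirely.
        static List<String> collectImproving(List<Candidate> candidates) {
            double best = Double.NEGATIVE_INFINITY;
            List<String> improving = new ArrayList<>();
            for (Candidate c : candidates) {
                if (c.distance > best) {
                    best = c.distance;
                    improving.add(c.secondaryString);
                }
            }
            return improving;
        }
    }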
randomLenghtPermit = strF.length * (Math.random() * Math.random() * Math.random() * (Math.random() * 10)) + Collections.reverse(concurrentRelations) + val mysqlUpdateLastUsed: ArrayList = ArrayList() + if (!concurrentRelations.isEmpty()) { + for (secondaryRelation in concurrentRelations) { + if (SB.toString().length > randomLenghtPermit && !SB.toString().isEmpty()) { + break + } + SB.append(secondaryRelation).append(" ") + mysqlUpdateLastUsed.add(secondaryRelation) + } + } + if (SB.toString().isEmpty()) { + return "failure, preventing stuckness" + } + runBlocking { + CoroutineScope(launch(Dispatchers.IO) { + DataMapper.updateLastUsed(mysqlUpdateLastUsed); + yield() + }) + } + return SB.toString() + } + + private fun getJMWEAnnotation(str1: String) { + val jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(str1) + jmweAnnotationCache.put(str1, jmweAnnotation) + } + + fun getResponseMsg(str: String, personName: String, stanfordCoreNLP: StanfordCoreNLP, + stanfordCoreNLPSentiment: StanfordCoreNLP, ingameResponse: Boolean): String { + var responseFutures: String = "" + runBlocking { + val launch1 = launch(Dispatchers.Default) { + var strF = trimString(str) + responseFutures = getResponseFutures(strF, stanfordCoreNLP, stanfordCoreNLPSentiment) + if (!ingameResponse) { + responseFutures = checkPersonPresentInSentence(personName, responseFutures, strF, stanfordCoreNLP, + stanfordCoreNLPSentiment) + } + yield() + } + launch1.join() + } + return responseFutures + } + + private fun checkPersonPresentInSentence(personName: String, responseMsg: String, userLastMessage: String, + stanfordCoreNLP: StanfordCoreNLP, + stanfordCoreNLPSentiment: StanfordCoreNLP): String { + try { + val pipelineCoreDcoument = CoreDocument(responseMsg) + val pipelineCoreDcoumentLastMsg = CoreDocument(userLastMessage) + stanfordCoreNLP.annotate(pipelineCoreDcoument) + stanfordCoreNLPSentiment.annotate(pipelineCoreDcoumentLastMsg) + val regex = "(.*?\\d){10,}" + for (em in pipelineCoreDcoument.entityMentions()) { + val entityType = em.entityType() + if (entityType == "PERSON") { + var str = responseMsg + val emText = em.text() + val pattern = Pattern.compile(regex) + val matcher = pattern.matcher(personName) + val isMatched = matcher.matches() + if (emText != personName && !isMatched) { + for (emLastMsg in pipelineCoreDcoumentLastMsg.entityMentions()) { + if (emText != emLastMsg.text() && !Character.isDigit(emLastMsg.text().trim { it <= ' ' }[0])) { + //System.out.println("emLastMsg.text(): " + emLastMsg.text()); + str = (responseMsg.substring(0, responseMsg.indexOf(emText)) + " " + + emLastMsg + " " + responseMsg.substring(responseMsg.indexOf(emText))) + } + } + str += " $personName" + return str + } + } + } + } catch (e: Exception) { + println("""SCUFFED JAYZ: ${e.localizedMessage}""".trimIndent()) + } + return responseMsg + } + + fun filterContent(str: String): Boolean { + if (!str.isEmpty() && str.length > 3) { + var str1Local: String = str.trim(); + if (str1Local.length > 2 && !str1Local.startsWith("!")) { + return true + } + } + return false + } + + fun getCoreDocumentsSuggested(pipeline: StanfordCoreNLP, str: String) { + val annotation = Annotation(str) + pipeline.annotate(annotation) + val coreDocument = CoreDocument(annotation) + coreDocumentAnnotationCache.put(str, coreDocument) + } +} \ No newline at end of file diff --git a/ArtificialAutism/src/main/java/FunctionLayer/DoStuff.java b/ArtificialAutism/src/main/java/FunctionLayer/DoStuff.java index 1eb98277..eb4a506b 100644 --- 
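The Kotlin above assembles the reply from the reversed relation list under a randomised length budget, records which sentences were used, and then persists their last_used timestamps through DataMapper.updateLastUsed. Note that runBlocking waits for the launched coroutine to finish, so the database write is effectively synchronous despite the Dispatchers.IO hop. A Java sketch of the budget-and-append step (names illustrative):

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    class ResponseAssembly {
        // Reverse so the best-scoring relations come first, then append
        // until the randomised budget derived from the input length is spent.
        static String assemble(String input, List<String> relations) {
            double budget = input.length()
                    * (Math.random() * Math.random() * Math.random() * (Math.random() * 10));
            List<String> reversed = new ArrayList<>(relations);
            Collections.reverse(reversed);
            StringBuilder sb = new StringBuilder();
            for (String relation : reversed) {
                if (sb.length() > budget && sb.length() > 0) {
                    break;
                }
                sb.append(relation).append(" ");
            }
            return sb.toString();
        }
    }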
a/ArtificialAutism/src/main/java/FunctionLayer/DoStuff.java +++ b/ArtificialAutism/src/main/java/FunctionLayer/DoStuff.java @@ -9,45 +9,43 @@ import PresentationLayer.DiscordHandler; import discord4j.core.event.domain.message.MessageCreateEvent; import discord4j.core.object.entity.User; import discord4j.core.object.entity.channel.TextChannel; + import java.math.BigInteger; import java.util.ArrayList; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; + +import edu.stanford.nlp.pipeline.StanfordCoreNLP; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; /** - * * @author install1 */ public class DoStuff { - public static boolean occupied = false; - public static boolean isOccupied() { - return occupied; - } - - public static void doStuff(MessageCreateEvent event, String usernameBot) { - String username = null; + public static void doStuff(MessageCreateEvent event, String usernameBot, Datahandler datahandler, + StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) { + String username = ""; try { username = event.getMessage().getAuthor().get().getUsername(); } catch (java.util.NoSuchElementException e) { username = null; } if (username != null && !username.equals(usernameBot)) { - occupied = true; TextChannel block = event.getMessage().getChannel().cast(TextChannel.class).block(); String name = block.getCategory().block().getName(); name = name.toLowerCase(); String channelName = block.getName().toLowerCase(); boolean channelpermissionsDenied = false; + if (channelName.contains("suggestion-box")) { + channelpermissionsDenied = true; + } switch (name) { - case "public area": { - break; - } + case "public area": case "information area": { break; } @@ -56,49 +54,34 @@ public class DoStuff { break; } } - List blockLast = event.getMessage().getUserMentions().buffer().blockLast(); - String content = event.getMessage().getContent(); if (!channelpermissionsDenied) { - if (blockLast != null) - { + List blockLast = event.getMessage().getUserMentions().buffer().blockLast(); + String content = event.getMessage().getContent(); + if (blockLast != null) { for (User user : blockLast) { content = content.replace(user.getId().asString(), ""); } } - MessageResponseHandler.getMessage(content); - } - boolean mentionedBot = false; - if (blockLast != null){ - for (User user : blockLast) - { - if (user.getUsername().equals(usernameBot)) - { - mentionedBot = true; - break; + boolean mentionedBot = false; + if (blockLast != null) { + for (User user : blockLast) { + if (user.getUsername().equals(usernameBot)) { + mentionedBot = true; + break; + } } } - } - if (mentionedBot || channelName.contains("general-autism")) { - try { + if (mentionedBot || channelName.contains("general-autism")) { String ResponseStr; - ResponseStr = MessageResponseHandler.selectReponseMessage(content, username); + ResponseStr = datahandler.getResponseMsg(content, username, stanfordCoreNLP, stanfordCoreNLPSentiment, + false); if (!ResponseStr.isEmpty()) { System.out.print("\nResponseStr3: " + ResponseStr + "\n"); event.getMessage().getChannel().block().createMessage(ResponseStr).block(); } - } catch (CustomError ex) { - Logger.getLogger(DoStuff.class.getName()).log(Level.SEVERE, null, ex); } - } - new Thread(() -> { - try { - Datahandler.instance.checkIfUpdateStrings(); - } catch (CustomError ex) { - Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex); - } - }).start(); - occupied = false; + datahandler.updateStringCache(); } } } diff --git 
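The reworked DoStuff above strips raw mention ids out of the message text, then only answers when the bot itself was mentioned or the channel name contains general-autism. A compact sketch of those two checks; MentionedUser is a stand-in for the discord4j User, not project API:

    import java.util.List;

    class MentionHandling {
        static final class MentionedUser {
            final String id;
            final String username;

            MentionedUser(String id, String username) {
                this.id = id;
                this.username = username;
            }
        }

        // Replace away each mentioned user's id digits, as the loop above does.
        static String stripMentionIds(String content, List<MentionedUser> mentions) {
            for (MentionedUser user : mentions) {
                content = content.replace(user.id, "");
            }
            return content;
        }

        static boolean mentionsBot(List<MentionedUser> mentions, String botName) {
            for (MentionedUser user : mentions) {
                if (user.username.equals(botName)) {
                    return true;
                }
            }
            return false;
        }
    }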
a/ArtificialAutism/src/main/java/FunctionLayer/MessageResponseHandler.java b/ArtificialAutism/src/main/java/FunctionLayer/MessageResponseHandler.java deleted file mode 100644 index 413651ae..00000000 --- a/ArtificialAutism/src/main/java/FunctionLayer/MessageResponseHandler.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. - */ -package FunctionLayer; - -import com.google.common.collect.MapMaker; -import edu.stanford.nlp.pipeline.CoreDocument; -import edu.stanford.nlp.pipeline.CoreEntityMention; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.ConcurrentMap; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * - * @author install1 - */ -public class MessageResponseHandler { - - private static ConcurrentMap str = new MapMaker().concurrencyLevel(2).makeMap(); - - public static ConcurrentMap getStr() { - return str; - } - - public static void setStr(ConcurrentMap str) { - MessageResponseHandler.str = str; - } - - public static void getMessage(String message) { - if (message != null && !message.isEmpty()) { - message = message.replace("@", ""); - if (message.contains("<>")) { - message = message.substring(message.indexOf(">")); - } - if (message.startsWith("[ *")) { - message = message.substring(message.indexOf("]")); - } - str.put(str.size() + 1, message); - } - } - - public static String selectReponseMessage(String toString, String personName) throws CustomError { - ConcurrentMap str1 = new MapMaker().concurrencyLevel(6).makeMap(); - str1.put(str1.size() + 1, toString); - String strreturn = ""; - for (String str : str1.values()) { - if (!str.isEmpty()) { - strreturn = str; - } - } - String getResponseMsg = Datahandler.instance.getResponseMsg(strreturn); - getResponseMsg = checkPersonPresentInSentence(personName, getResponseMsg, strreturn); - return getResponseMsg; - } - - private static String checkPersonPresentInSentence(String personName, String responseMsg, String userLastMessage) { - //check if userlastmsg contains person as refference - //check if first person is author or their person of mention - try { - String strreturn = responseMsg; - CoreDocument pipelineCoreDcoument = new CoreDocument(responseMsg); - CoreDocument pipelineCoreDcoumentLastMsg = new CoreDocument(userLastMessage); - Datahandler.getPipeline().annotate(pipelineCoreDcoument); - Datahandler.getPipeline().annotate(pipelineCoreDcoumentLastMsg); - String regex = "(.*?\\d){10,}"; - for (CoreEntityMention em : pipelineCoreDcoument.entityMentions()) { - String entityType = em.entityType(); - if (entityType.equals("PERSON")) { - String str = strreturn; - String emText = em.text(); - Pattern pattern = Pattern.compile(regex); - Matcher matcher = pattern.matcher(personName); - boolean isMatched = matcher.matches(); - if (!emText.equals(personName) && !isMatched) { - for (CoreEntityMention emLastMsg : pipelineCoreDcoumentLastMsg.entityMentions()) { - if (!emText.equals(emLastMsg.text()) && !Character.isDigit(emLastMsg.text().trim().charAt(0))) { - //System.out.println("emLastMsg.text(): " + emLastMsg.text()); - str = strreturn.substring(0, strreturn.indexOf(emText)) + " " - + emLastMsg + " " + strreturn.substring(strreturn.indexOf(emText)); - } - } - str += " " + personName; - return str; - } - } - } - } catch (Exception e) { - System.out.println("SCUFFED JAYZ: " + e.getLocalizedMessage() + "\n"); - } - 
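The deleted MessageResponseHandler above used to normalise inbound chat before caching it; the new flow passes message content through unmodified apart from the id stripping in DoStuff. For reference, a standalone restatement of the removed trimming:

    class MessageNormalizer {
        // Restates the removed getMessage() logic: drop @-signs, cut text up to
        // a leading "<>" mention remnant, and cut a leading "[ *" prefix.
        static String normalize(String message) {
            if (message == null || message.isEmpty()) {
                return message;
            }
            message = message.replace("@", "");
            if (message.contains("<>")) {
                message = message.substring(message.indexOf(">"));
            }
            if (message.startsWith("[ *")) {
                message = message.substring(message.indexOf("]"));
            }
            return message;
        }
    }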
return responseMsg; - } - - public static int getOverHead() { - int getResponseMsgOverHead = Datahandler.instance.getMessageOverHead(); - return getResponseMsgOverHead; - } -} diff --git a/ArtificialAutism/src/main/java/FunctionLayer/PipelineJMWESingleton.java b/ArtificialAutism/src/main/java/FunctionLayer/PipelineJMWESingleton.java index 32f2d4b6..c2c2c66f 100644 --- a/ArtificialAutism/src/main/java/FunctionLayer/PipelineJMWESingleton.java +++ b/ArtificialAutism/src/main/java/FunctionLayer/PipelineJMWESingleton.java @@ -5,7 +5,6 @@ */ package FunctionLayer; -import com.google.common.collect.MapMaker; import edu.mit.jmwe.data.IMWE; import edu.mit.jmwe.data.IToken; import edu.mit.jmwe.data.Token; @@ -24,38 +23,30 @@ import edu.stanford.nlp.ling.JMWEAnnotation; import edu.stanford.nlp.pipeline.Annotation; import edu.stanford.nlp.pipeline.StanfordCoreNLP; import edu.stanford.nlp.util.CoreMap; + import java.io.File; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.List; import java.util.Properties; -import java.util.concurrent.ConcurrentMap; /** - * * @author install1 */ //maybe not public? public class PipelineJMWESingleton { //if not needed to be volatile dont make it, increases time - public volatile static PipelineJMWESingleton INSTANCE; + //public volatile static PipelineJMWESingleton INSTANCE; + public static PipelineJMWESingleton INSTANCE; private static StanfordCoreNLP localNLP = initializeJMWE(); private static String underscoreSpaceReplacement; + private static IMWEIndex index; + private static IMWEDetector detector; private PipelineJMWESingleton() { - } - - public static void getINSTANCE() { - INSTANCE = new PipelineJMWESingleton(); - } - - public final ConcurrentMap getJMWEAnnotation(Collection strvalues) { - boolean verbose = false; - IMWEIndex index; - String jmweIndexData = "/home/debian/autism_bot/lib/mweindex_wordnet3.0_semcor1.6.data"; // ./lib/mweindex_wordnet3.0_semcor1.6.data - String jmweIndexDataLocalTest = "E:/java8/Projects/mweindex_wordnet3.0_semcor1.6.data"; + String jmweIndexData = "/home/gameservers/autism_bot/lib/mweindex_wordnet3.0_semcor1.6.data"; // ./lib/mweindex_wordnet3.0_semcor1.6.data + String jmweIndexDataLocalTest = "E:/stationær backup filer/Projects/mweindex_wordnet3.0_semcor1.6.data"; File indexFile = new File((String) jmweIndexData); index = new MWEIndex(indexFile); String detectorName = "Exhaustive"; @@ -64,36 +55,45 @@ public class PipelineJMWESingleton { } catch (IOException e) { throw new RuntimeException("unable to open IMWEIndex index: " + e + "\n"); } - IMWEDetector detector = getDetector(index, detectorName); - ConcurrentMap returnAnnotations = new MapMaker().concurrencyLevel(2).makeMap(); - strvalues.forEach(str -> { - Annotation annoStr = new Annotation(str); - returnAnnotations.put(str, annoStr); - }); - localNLP.annotate(returnAnnotations.values()); - returnAnnotations.values().parallelStream().forEach(annoStr -> { - for (CoreMap sentence : annoStr.get(CoreAnnotations.SentencesAnnotation.class)) { - List> mwes = getjMWEInSentence(sentence, index, detector, verbose); - sentence.set(JMWEAnnotation.class, mwes); - } - }); + detector = getDetector(index, detectorName); index.close(); - return returnAnnotations; + } + + public static void getINSTANCE() { + INSTANCE = new PipelineJMWESingleton(); + } + + public final Annotation getJMWEAnnotation(String str) { + try { + index.open(); + } catch (IOException e) { + throw new RuntimeException("unable to open IMWEIndex index: " + e + "\n"); + } + 
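The singleton above now builds the MWE index and detector once in its private constructor, so each getJMWEAnnotation call only pays for index.open()/close() plus the annotation itself. The shallow CoreNLP pipeline it relies on is configured in initializeJMWE below; a self-contained sketch of that setup, using the property values from this diff (requires the stanford-corenlp artifact on the classpath):

    import edu.stanford.nlp.pipeline.StanfordCoreNLP;

    import java.util.Properties;

    class JmwePipelineSketch {
        // Only shallow annotators are needed ahead of MWE detection;
        // the values mirror the properties set in the diff.
        static StanfordCoreNLP build() {
            Properties props = new Properties();
            props.setProperty("annotators", "tokenize,ssplit,pos,lemma");
            props.setProperty("tokenize.options", "untokenizable=firstKeep");
            props.setProperty("threads", "5");
            props.setProperty("pos.maxlen", "90");
            props.setProperty("tokenize.maxlen", "90");
            props.setProperty("ssplit.maxlen", "90");
            props.setProperty("lemma.maxlen", "90");
            return new StanfordCoreNLP(props);
        }
    }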
Annotation annoStr = new Annotation(str);
+        localNLP.annotate(annoStr);
+        Class sentencesAnnotationClass = CoreAnnotations.SentencesAnnotation.class;
+        for (CoreMap sentence : annoStr.get(sentencesAnnotationClass)) {
+            List<IMWE<IToken>> mwes = getjMWEInSentence(sentence, index, detector, false);
+            //annoStr.set(JMWEAnnotation.class, mwes);
+            sentence.set(JMWEAnnotation.class, mwes);
+        }
+        index.close();
+        return annoStr;
     }
 
     public final static StanfordCoreNLP initializeJMWE() {
         Properties propsJMWE;
         propsJMWE = new Properties();
         propsJMWE.setProperty("annotators", "tokenize,ssplit,pos,lemma");
-        propsJMWE.setProperty("tokenize.options", "untokenizable=firstDelete");
-        propsJMWE.setProperty("threads", "25");
+        propsJMWE.setProperty("tokenize.options", "untokenizable=firstKeep");
+        propsJMWE.setProperty("threads", "5");
         propsJMWE.setProperty("pos.maxlen", "90");
         propsJMWE.setProperty("tokenize.maxlen", "90");
         propsJMWE.setProperty("ssplit.maxlen", "90");
         propsJMWE.setProperty("lemma.maxlen", "90");
         underscoreSpaceReplacement = "-";
         localNLP = new StanfordCoreNLP(propsJMWE);
-        System.out.println("finished singleton constructor \n");
+        System.out.println("finished JMWE constructor \n");
         return localNLP;
     }
@@ -124,7 +124,7 @@
     }
 
     public List<IMWE<IToken>> getjMWEInSentence(CoreMap sentence, IMWEIndex index, IMWEDetector detector,
-            boolean verbose) {
+                                                boolean verbose) {
         List<IToken> tokens = getITokens(sentence.get(CoreAnnotations.TokensAnnotation.class));
         List<IMWE<IToken>> mwes = detector.detect(tokens);
         if (verbose) {
@@ -146,5 +146,4 @@
         }
         return sentence;
     }
-
 }
diff --git a/ArtificialAutism/src/main/java/FunctionLayer/SimilarityMatrix.java b/ArtificialAutism/src/main/java/FunctionLayer/SimilarityMatrix.java
index 23ada343..59154703 100644
--- a/ArtificialAutism/src/main/java/FunctionLayer/SimilarityMatrix.java
+++ b/ArtificialAutism/src/main/java/FunctionLayer/SimilarityMatrix.java
@@ -5,10 +5,7 @@
  */
 package FunctionLayer;
 
-import FunctionLayer.StanfordParser.SentimentValueCache;
-
 /**
- *
  * @author install1
  */
 public class SimilarityMatrix {
@@ -16,8 +13,6 @@ public class SimilarityMatrix {
     private String PrimaryString;
     private String SecondaryString;
     private double distance;
-    private SentimentValueCache cacheValue1;
-    private SentimentValueCache cacheValue2;
 
     public final double getDistance() {
         return distance;
@@ -38,36 +33,8 @@ public class SimilarityMatrix {
         this.distance = result;
     }
 
-    public final String getPrimaryString() {
-        return PrimaryString;
-    }
-
-    public final void setPrimaryString(String PrimaryString) {
-        this.PrimaryString = PrimaryString;
-    }
-
     public final String getSecondaryString() {
         return SecondaryString;
     }
 
-    public final void setSecondaryString(String SecondaryString) {
-        this.SecondaryString = SecondaryString;
-    }
-
-    public final SentimentValueCache getCacheValue1() {
-        return cacheValue1;
-    }
-
-    public final void setCacheValue1(SentimentValueCache cacheValue1) {
-        this.cacheValue1 = cacheValue1;
-    }
-
-    public final SentimentValueCache getCacheValue2() {
-        return cacheValue2;
-    }
-
-    public final void setCacheValue2(SentimentValueCache cacheValue2) {
-        this.cacheValue2 = cacheValue2;
-    }
-
 }
diff --git a/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTest.java b/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTest.java
index a4a6b052..0d0fb226 100644
--- a/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTest.java
+++
b/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTest.java @@ -1,10 +1,8 @@ package FunctionLayer.StanfordParser; import FunctionLayer.LevenshteinDistance; -import FunctionLayer.Datahandler; import FunctionLayer.SimilarityMatrix; import FunctionLayer.StopwordAnnotator; -import com.google.common.collect.MapMaker; import edu.mit.jmwe.data.IMWE; import edu.mit.jmwe.data.IMWEDesc; import edu.mit.jmwe.data.IToken; @@ -19,7 +17,6 @@ import edu.stanford.nlp.neural.rnn.RNNCoreAnnotations; import edu.stanford.nlp.pipeline.Annotation; import edu.stanford.nlp.pipeline.CoreDocument; import edu.stanford.nlp.pipeline.CoreEntityMention; -import edu.stanford.nlp.pipeline.StanfordCoreNLP; import edu.stanford.nlp.process.CoreLabelTokenFactory; import edu.stanford.nlp.process.DocumentPreprocessor; import edu.stanford.nlp.process.PTBTokenizer; @@ -37,18 +34,14 @@ import edu.stanford.nlp.trees.TypedDependency; import edu.stanford.nlp.trees.tregex.gui.Tdiff; import edu.stanford.nlp.util.CoreMap; import edu.stanford.nlp.util.Pair; + +import java.io.IOException; import java.io.StringReader; -import java.util.AbstractMap; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.OptionalDouble; -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.atomic.AtomicInteger; +import java.util.*; +import java.util.logging.FileHandler; +import java.util.logging.Logger; +import java.util.logging.SimpleFormatter; + import org.apache.lucene.analysis.core.StopAnalyzer; import org.ejml.simple.SimpleMatrix; @@ -57,11 +50,11 @@ import org.ejml.simple.SimpleMatrix; * To change this template file, choose Tools | Templates * and open the template in the editor. 
*/ + /** - * * @author install1 */ -public class SentimentAnalyzerTest implements Callable { +public class SentimentAnalyzerTest { private final SimilarityMatrix smxParam; private final String str; @@ -69,50 +62,492 @@ public class SentimentAnalyzerTest implements Callable { private final MaxentTagger tagger; private final GrammaticalStructureFactory gsf; private final AbstractSequenceClassifier classifier; - private final Annotation jmweStrAnnotation1; - private final Annotation jmweStrAnnotation2; + private final List coreMaps1; + private final List coreMaps2; private final Annotation pipelineAnnotation1; private final Annotation pipelineAnnotation2; private final Annotation pipelineAnnotation1Sentiment; private final Annotation pipelineAnnotation2Sentiment; private final CoreDocument pipelineCoreDcoument1; private final CoreDocument pipelineCoreDcoument2; - private SentimentValueCache cacheSentiment1; - private SentimentValueCache cacheSentiment2; + private Logger logger = Logger.getLogger("autismlog"); + private FileHandler fh; - public final SentimentValueCache getCacheSentiment1() { - return cacheSentiment1; + public Integer getTokenizeCounting() { + return tokenizeCounting; } - public final SentimentValueCache getCacheSentiment2() { - return cacheSentiment2; + public List> getTaggedWordListF() { + return taggedWordListF; } - public SentimentAnalyzerTest(String str, String str1, SimilarityMatrix smxParam, Annotation str1Annotation, Annotation str2Annotation, - Annotation strPipeline1, Annotation strPipeline2, Annotation strPipeSentiment1, Annotation strPipeSentiment2, - CoreDocument pipelineCoreDcoument1, CoreDocument pipelineCoreDcoument2, SentimentValueCache cacheValue1, SentimentValueCache cacheValue2) { + public List> getTaggedWordList1() { + return taggedWordList1; + } + + public ArrayList getRetrieveTGWList1() { + return retrieveTGWList1; + } + + public List getSentencesF() { + return sentencesF; + } + + public Integer getTokenizeCountingF() { + return tokenizeCountingF; + } + + public ArrayList getRetrieveTGWListF() { + return retrieveTGWListF; + } + + public List getSentences1() { + return sentences1; + } + + public List getSentencesSentimentF() { + return sentencesSentimentF; + } + + public List getSentencesSentiment1() { + return sentencesSentiment1; + } + + public ArrayList getTreesF() { + return treesF; + } + + public ArrayList getTrees1() { + return trees1; + } + + + public ArrayList getGrammaticalStructuresF() { + return grammaticalStructuresF; + } + + public ArrayList getGrammaticalStructures1() { + return grammaticalStructures1; + } + + public ArrayList getTypedDependenciesF() { + return typedDependenciesF; + } + + public ArrayList getTypedDependencies1() { + return typedDependencies1; + } + + public ArrayList getRnnCoreAnnotationsPredictedF() { + return rnnCoreAnnotationsPredictedF; + } + + public ArrayList getRnnCoreAnnotationsPredicted1() { + return rnnCoreAnnotationsPredicted1; + } + + public ArrayList getSimpleMatricesF() { + return simpleMatricesF; + } + + public ArrayList getSimpleMatrices1() { + return simpleMatrices1; + } + + public ArrayList getSimpleMatricesNodevectorsF() { + return simpleMatricesNodevectorsF; + } + + public List getListF() { + return listF; + } + + public List getList1() { + return list1; + } + + public ArrayList getSimpleMatricesNodevectors1() { + return simpleMatricesNodevectors1; + } + + public Integer getLongestF() { + return longestF; + } + + public Integer getSentimentLongestF() { + return sentimentLongestF; + } + + public Integer 
getSentimentLongest1() { + return sentimentLongest1; + } + + public List> getImwesF() { + return imwesF; + } + + public List> getImwes1() { + return imwes1; + } + + public Integer getLongest1() { + return longest1; + } + + public Integer getInflectedCounterNegativeF() { + return InflectedCounterNegativeF; + } + + public Integer getInflectedCounterPositiveF() { + return InflectedCounterPositiveF; + } + + public Integer getInflectedCounterPositive1() { + return InflectedCounterPositive1; + } + + public Integer getInflectedCounterNegative1() { + return InflectedCounterNegative1; + } + + public ArrayList getTokenEntryF() { + return tokenEntryF; + } + + public ArrayList getTokenEntry1() { + return tokenEntry1; + } + + public Integer getMarkedContinuousCounterF() { + return MarkedContinuousCounterF; + } + + public Integer getMarkedContinuousCounter1() { + return MarkedContinuousCounter1; + } + + public Integer getUnmarkedPatternCounterF() { + return UnmarkedPatternCounterF; + } + + public Integer getUnmarkedPatternCounter1() { + return UnmarkedPatternCounter1; + } + + public ArrayList getStrTokensIpartFormF() { + return strTokensIpartFormF; + } + + public ArrayList getStrTokensIpartForm1() { + return strTokensIpartForm1; + } + + public ArrayList getTokenFormsF() { + return tokenFormsF; + } + + public ArrayList getTokenForms1() { + return tokenForms1; + } + + public ArrayList getStrTokenEntryGetPOSF() { + return strTokenEntryGetPOSF; + } + + public ArrayList getStrTokenEntryGetPOS1() { + return strTokenEntryGetPOS1; + } + + public ArrayList getIntTokenEntyCountsF() { + return intTokenEntyCountsF; + } + + public ArrayList getIntTokenEntyCounts1() { + return intTokenEntyCounts1; + } + + public ArrayList getITokenTagsF() { + return ITokenTagsF; + } + + public ArrayList getITokenTags1() { + return ITokenTags1; + } + + public ArrayList getStrTokenStemsF() { + return strTokenStemsF; + } + + public ArrayList getStrTokenStems1() { + return strTokenStems1; + } + + public Integer getAnotatorcounterF() { + return AnotatorcounterF; + } + + public Integer getAnotatorcounter1() { + return Anotatorcounter1; + } + + public Integer getTokensCounterF() { + return TokensCounterF; + } + + public Integer getTokensCounter1() { + return TokensCounter1; + } + + public ArrayList getEntityTokenTagsF() { + return entityTokenTagsF; + } + + public ArrayList getEntityTokenTags1() { + return entityTokenTags1; + } + + public ArrayList getNerEntitiesF() { + return nerEntitiesF; + } + + public ArrayList getNerEntities1() { + return nerEntities1; + } + + public ArrayList getNerEntitiesTypeF() { + return nerEntitiesTypeF; + } + + public ArrayList getNerEntitiesType1() { + return nerEntitiesType1; + } + + public ArrayList getStopWordTokenF() { + return stopWordTokenF; + } + + public ArrayList getStopWordToken1() { + return stopWordToken1; + } + + public ArrayList getStopWordLemmaF() { + return stopWordLemmaF; + } + + public ArrayList getStopWordLemma1() { + return stopWordLemma1; + } + + public Integer getPairCounterF() { + return PairCounterF; + } + + public Integer getPairCounter1() { + return PairCounter1; + } + + //caches + private Integer tokenizeCounting; + private Integer tokenizeCountingF; + private List> taggedWordListF; + private List> taggedWordList1; + private ArrayList retrieveTGWList1; + private ArrayList retrieveTGWListF; + private List sentencesF; + private List sentences1; + private List sentencesSentimentF; + private List sentencesSentiment1; + private ArrayList treesF; + private ArrayList trees1; + private 
ArrayList grammaticalStructuresF; + private ArrayList grammaticalStructures1; + private ArrayList typedDependenciesF; + private ArrayList typedDependencies1; + private ArrayList rnnCoreAnnotationsPredictedF; + private ArrayList rnnCoreAnnotationsPredicted1; + private ArrayList simpleMatricesF; + private ArrayList simpleMatrices1; + private ArrayList simpleMatricesNodevectorsF; + private ArrayList simpleMatricesNodevectors1; + private List listF; + private List list1; + private Integer longestF; + private Integer longest1; + private Integer sentimentLongestF; + private Integer sentimentLongest1; + private List> imwesF; + private List> imwes1; + private Integer InflectedCounterNegativeF; + private Integer InflectedCounterNegative1; + private Integer InflectedCounterPositiveF; + private Integer InflectedCounterPositive1; + private ArrayList tokenEntryF; + private ArrayList tokenEntry1; + private Integer MarkedContinuousCounterF; + private Integer MarkedContinuousCounter1; + private Integer UnmarkedPatternCounterF; + private Integer UnmarkedPatternCounter1; + private ArrayList strTokensIpartFormF; + private ArrayList strTokensIpartForm1; + private ArrayList tokenFormsF; + private ArrayList tokenForms1; + private ArrayList strTokenEntryGetPOSF; + private ArrayList strTokenEntryGetPOS1; + private ArrayList intTokenEntyCountsF; + private ArrayList intTokenEntyCounts1; + private ArrayList ITokenTagsF; + private ArrayList ITokenTags1; + private ArrayList strTokenStemsF; + private ArrayList strTokenStems1; + private Integer AnotatorcounterF; + private Integer Anotatorcounter1; + private Integer TokensCounterF; + private Integer TokensCounter1; + private ArrayList entityTokenTagsF; + private ArrayList entityTokenTags1; + private ArrayList nerEntitiesF; + private ArrayList nerEntities1; + private ArrayList nerEntitiesTypeF; + private ArrayList nerEntitiesType1; + private ArrayList stopWordTokenF; + private ArrayList stopWordToken1; + private ArrayList stopWordLemmaF; + private ArrayList stopWordLemma1; + private Integer PairCounterF; + private Integer PairCounter1; + + public SentimentAnalyzerTest(String str, String str1, SimilarityMatrix smxParam, List coreMaps1, List coreMaps2, + Annotation strPipeline1, Annotation strPipeline2, Annotation strPipeSentiment1, Annotation strPipeSentiment2, + CoreDocument pipelineCoreDcoument1, CoreDocument pipelineCoreDcoument2, + MaxentTagger tagger, GrammaticalStructureFactory gsf, + AbstractSequenceClassifier classifier, Integer tokenizeCounting, + Integer tokenizeCountingF, List> taggedWordListF, + List> taggedWordList1, ArrayList + retrieveTGWListF, ArrayList retrieveTGWList1, + List sentencesF, List sentences1, + List sentencesSentimentF, List sentencesSentiment1, + ArrayList treesF, ArrayList trees1, + ArrayList grammaticalStructuresF, + ArrayList grammaticalStructures1, + ArrayList typedDependenciesF, + ArrayList typedDependencies1, + ArrayList rnnCoreAnnotationsPredictedF, + ArrayList rnnCoreAnnotationsPredicted1, + ArrayList simpleMatricesF, + ArrayList simpleMatrices1, + ArrayList simpleMatricesNodevectorsF, + ArrayList simpleMatricesNodevectors1, + List listF, List list1, Integer longestF, Integer longest1, + Integer sentimentLongestF, Integer sentimentLongest1, + List> imwesF, List> imwes1, + Integer InflectedCounterNegativeF, + Integer InflectedCounterNegative1, Integer InflectedCounterPositiveF, + Integer InflectedCounterPositive1, ArrayList tokenEntryF, + ArrayList tokenEntry1, Integer MarkedContinuousCounterF, + Integer MarkedContinuousCounter1, 
Integer UnmarkedPatternCounterF, + Integer UnmarkedPatternCounter1, ArrayList strTokensIpartFormF, + ArrayList strTokensIpartForm1, ArrayList tokenFormsF, + ArrayList tokenForms1, ArrayList strTokenEntryGetPOSF, + ArrayList strTokenEntryGetPOS1, ArrayList intTokenEntyCountsF, + ArrayList intTokenEntyCounts1, ArrayList ITokenTagsF, + ArrayList ITokenTags1, ArrayList strTokenStemsF, + ArrayList strTokenStems1, Integer AnotatorcounterF, + Integer Anotatorcounter1, Integer TokensCounterF, + Integer TokensCounter1, ArrayList entityTokenTagsF, + ArrayList entityTokenTags1, ArrayList nerEntitiesF, + ArrayList nerEntities1, ArrayList nerEntitiesTypeF, + ArrayList nerEntitiesType1, ArrayList stopWordTokenF, + ArrayList stopWordToken1, ArrayList stopWordLemmaF, + ArrayList stopWordLemma1, Integer PairCounterF, + Integer PairCounter1) { this.str = str; this.str1 = str1; this.smxParam = smxParam; - this.tagger = Datahandler.getTagger(); - this.gsf = Datahandler.getGsf(); - this.classifier = Datahandler.getClassifier(); - this.jmweStrAnnotation1 = str1Annotation; - this.jmweStrAnnotation2 = str2Annotation; + this.tagger = tagger; + this.gsf = gsf; + this.classifier = classifier; + this.coreMaps1 = coreMaps1; + this.coreMaps2 = coreMaps2; this.pipelineAnnotation1 = strPipeline1; this.pipelineAnnotation2 = strPipeline2; this.pipelineAnnotation1Sentiment = strPipeSentiment1; this.pipelineAnnotation2Sentiment = strPipeSentiment2; this.pipelineCoreDcoument1 = pipelineCoreDcoument1; this.pipelineCoreDcoument2 = pipelineCoreDcoument2; - this.cacheSentiment1 = cacheValue1; - this.cacheSentiment2 = cacheValue2; + this.tokenizeCounting = tokenizeCounting; + this.tokenizeCountingF = tokenizeCountingF; + this.taggedWordListF = taggedWordListF; + this.taggedWordList1 = taggedWordList1; + this.retrieveTGWListF = retrieveTGWListF; + this.retrieveTGWList1 = retrieveTGWList1; + this.sentencesF = sentencesF; + this.sentences1 = sentences1; + this.sentencesSentimentF = sentencesSentimentF; + this.sentencesSentiment1 = sentencesSentiment1; + this.treesF = treesF; + this.trees1 = trees1; + this.grammaticalStructuresF = grammaticalStructuresF; + this.grammaticalStructures1 = grammaticalStructures1; + this.typedDependenciesF = typedDependenciesF; + this.typedDependencies1 = typedDependencies1; + this.rnnCoreAnnotationsPredictedF = rnnCoreAnnotationsPredictedF; + this.rnnCoreAnnotationsPredicted1 = rnnCoreAnnotationsPredicted1; + this.simpleMatricesF = simpleMatricesF; + this.simpleMatrices1 = simpleMatrices1; + this.simpleMatricesNodevectorsF = simpleMatricesNodevectorsF; + this.simpleMatricesNodevectors1 = simpleMatricesNodevectors1; + this.listF = listF; + this.list1 = list1; + this.longestF = longestF; + this.longest1 = longest1; + this.sentimentLongestF = sentimentLongestF; + this.sentimentLongest1 = sentimentLongest1; + this.imwesF = imwesF; + this.imwes1 = imwes1; + this.InflectedCounterNegativeF = InflectedCounterNegativeF; + this.InflectedCounterNegative1 = InflectedCounterNegative1; + this.InflectedCounterPositiveF = InflectedCounterPositiveF; + this.InflectedCounterPositive1 = InflectedCounterPositive1; + this.tokenEntryF = tokenEntryF; + this.tokenEntry1 = tokenEntry1; + this.MarkedContinuousCounterF = MarkedContinuousCounterF; + this.MarkedContinuousCounter1 = MarkedContinuousCounter1; + this.UnmarkedPatternCounterF = UnmarkedPatternCounterF; + this.UnmarkedPatternCounter1 = UnmarkedPatternCounter1; + this.strTokensIpartFormF = strTokensIpartFormF; + this.strTokensIpartForm1 = strTokensIpartForm1; + 
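The constructor above threads roughly seventy cached artefacts through positionally, which makes every call site fragile to reordering. A conventional refactor is a parameter object with fluent setters; a trimmed sketch showing two of the fields (AnalyzerInputs is illustrative, not project API):

    import edu.stanford.nlp.ling.TaggedWord;

    import java.util.List;

    // Trimmed parameter object; the full version would carry one field per
    // cached artefact instead of a seventy-argument constructor.
    final class AnalyzerInputs {
        Integer tokenizeCounting;
        List<List<TaggedWord>> taggedWordList1;

        AnalyzerInputs tokenizeCounting(Integer value) {
            this.tokenizeCounting = value;
            return this;
        }

        AnalyzerInputs taggedWordList1(List<List<TaggedWord>> value) {
            this.taggedWordList1 = value;
            return this;
        }
    }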
this.tokenFormsF = tokenFormsF; + this.tokenForms1 = tokenForms1; + this.strTokenEntryGetPOSF = strTokenEntryGetPOSF; + this.strTokenEntryGetPOS1 = strTokenEntryGetPOS1; + this.intTokenEntyCountsF = intTokenEntyCountsF; + this.intTokenEntyCounts1 = intTokenEntyCounts1; + this.ITokenTagsF = ITokenTagsF; + this.ITokenTags1 = ITokenTags1; + this.strTokenStemsF = strTokenStemsF; + this.strTokenStems1 = strTokenStems1; + this.AnotatorcounterF = AnotatorcounterF; + this.Anotatorcounter1 = Anotatorcounter1; + this.TokensCounterF = TokensCounterF; + this.TokensCounter1 = TokensCounter1; + this.entityTokenTagsF = entityTokenTagsF; + this.entityTokenTags1 = entityTokenTags1; + this.nerEntitiesF = nerEntitiesF; + this.nerEntities1 = nerEntities1; + this.nerEntitiesTypeF = nerEntitiesTypeF; + this.nerEntitiesType1 = nerEntitiesType1; + this.stopWordTokenF = stopWordTokenF; + this.stopWordToken1 = stopWordToken1; + this.stopWordLemmaF = stopWordLemmaF; + this.stopWordLemma1 = stopWordLemma1; + this.PairCounterF = PairCounterF; + this.PairCounter1 = PairCounter1; } private List> getTaggedWordList(String message) { List> taggedwordlist = new ArrayList(); DocumentPreprocessor tokenizer = new DocumentPreprocessor(new StringReader(message)); - TokenizerFactory ptbTokenizerFactory = PTBTokenizer.factory(new CoreLabelTokenFactory(), "untokenizable=firstDelete"); //noneDelete + TokenizerFactory ptbTokenizerFactory = PTBTokenizer.factory(new CoreLabelTokenFactory(), "untokenizable=noneDelete"); //noneDelete //firstDelete tokenizer.setTokenizerFactory(ptbTokenizerFactory); for (final List sentence : tokenizer) { taggedwordlist.add(tagger.tagSentence(sentence)); @@ -122,111 +557,150 @@ public class SentimentAnalyzerTest implements Callable { private int tokenizeCounting(List> taggedwordlist) { int counter = 0; - Collection taggedCollection = new ArrayList(); for (List taggedList : taggedwordlist) { counter += taggedList.size(); } return counter; } - private ConcurrentMap retrieveTGWListIndex(List> taggedwordlist) { - ConcurrentMap tgwlistIndex = new MapMaker().concurrencyLevel(2).makeMap(); - taggedwordlist.forEach((TGWList) -> { - TGWList.forEach((TaggedWord) -> { - if (!tgwlistIndex.values().contains(TaggedWord.tag()) && !TaggedWord.tag().equals(":")) { - tgwlistIndex.put(tgwlistIndex.size() + 1, TaggedWord.tag()); - } - }); - }); - return tgwlistIndex; - } - - private Double iterateTrees(ConcurrentMap sentenceConstituencyParseList2, ConcurrentMap sentenceConstituencyParseList1, - Double score) { - double preConstituentsScore = score; - ConcurrentMap constituentsMap = new MapMaker().concurrencyLevel(4).makeMap(); - int constituencySize = sentenceConstituencyParseList1.size() + sentenceConstituencyParseList2.size(); - for (final Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2.values()) { - int constiRelationsize = 0; - for (final Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1.values()) { - Set constinuent1 = Tdiff.markDiff(sentenceConstituencyParse1, sentenceConstituencyParse2); - Set constinuent2 = Tdiff.markDiff(sentenceConstituencyParse2, sentenceConstituencyParse1); - ConcurrentMap constiLabels = new MapMaker().concurrencyLevel(2).makeMap(); - for (final Constituent consti : constinuent1) { - for (final Constituent consti1 : constinuent2) { - if (consti.value().equals(consti1.value()) && !constiLabels.values().contains(consti.value())) { - constiLabels.put(constiLabels.size(), consti.value()); - constiRelationsize++; - } - } - } - int constituents1 = constinuent1.size() - 
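getTaggedWordList above runs each message through a DocumentPreprocessor whose PTB tokenizer now keeps untokenizable input (noneDelete) instead of deleting the first occurrence, and tokenizeCounting is a plain size sum over the tagged sentences. The counting step restated standalone:

    import edu.stanford.nlp.ling.TaggedWord;

    import java.util.List;

    class TokenCounting {
        // Sum of per-sentence token counts, as tokenizeCounting computes above.
        static int countTokens(List<List<TaggedWord>> taggedSentences) {
            int counter = 0;
            for (List<TaggedWord> sentence : taggedSentences) {
                counter += sentence.size();
            }
            return counter;
        }
    }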
constiRelationsize; - int constituents2 = constinuent2.size() - constiRelationsize; - constituentsMap.put(constituentsMap.size(), constituents1); - constituentsMap.put(constituentsMap.size(), constituents2); - constituentsMap.put(constituentsMap.size(), constiRelationsize); - if (constituentsMap.size() < 4) { - if ((constituents1 * 5 < constituents2 || constituents2 * 5 < constituents1) && constituents1 > 0 && constituents2 > 0) { - score -= (constituents1 + constituents2) * 200; - } else if ((constituents1 == 0 || constituents2 == 0) && (constituents1 >= constituents2 + 4 || constituents2 >= constituents1 + 4)) { - score -= constituents1 > constituents2 ? constituents1 * 500 : constituents2 * 500; - } else if (constiRelationsize >= constituents1 + constituents2 && (constituents1 > 2 && constituents2 > 2)) { - score += (constiRelationsize + constituents1 + constituents2) * 350; - } else if (constituents1 >= 2 && constituents2 >= 2 && constituents1 * 1.5 > constituents2 && constituents2 * 1.5 > constituents1) { - if (constituents1 == constituents2 && constiRelationsize - constituents1 == 1) { - score += (constiRelationsize + constituents1 + constituents2) * 1550; - } else if (constiRelationsize >= constituents1 && constituents1 == constituents2) { - score -= (constiRelationsize + constituents1 + constituents2) * 550; - } else if (constiRelationsize < constituents1 && constiRelationsize < constituents2) { - score += 800; - } else if ((constiRelationsize == constituents1 || constiRelationsize == constituents2) && constituents1 * 1.5 > constituents2 - && constituents2 * 1.5 > constituents1) { - score += (constiRelationsize + constituents1 + constituents2) * 350; - } - } else if (constiRelationsize > constituents1 + constituents2) { - score += 2500; - } else if (constiRelationsize * 5 < constituents1 || constiRelationsize * 5 < constituents2) { - score -= (constituents1 + constituents2) * 400; - } - } else { - score = preConstituentsScore; - int n1 = constituentsMap.get(0); - int n2 = constituentsMap.get(1); - int n3 = constituentsMap.get(2); - int cap = 0; - if (n1 > n2 && n1 > n3) { - cap = n1; - } else if (n2 > n3 && n2 > n1) { - cap = n2; - } else { - cap = n3; - } - int overheat = 0; - for (int iterator = 3; iterator < constituentsMap.size(); iterator++) { - Integer getConstituent = constituentsMap.get(iterator); - if (getConstituent > cap) { - overheat++; - } - } - if (overheat > 1) { - score -= overheat * 800; - } else { - score += 1300; + private ArrayList retrieveTGWListIndex(List> taggedwordlist) { + ArrayList tgwlistIndex = new ArrayList(); + for (List tGWList : taggedwordlist) { + for (TaggedWord taggedWord : tGWList) { + for (String str : tgwlistIndex) { + if (!taggedWord.tag().equals(str) && !taggedWord.tag().equals(":")) { + tgwlistIndex.add(taggedWord.tag()); + tGWList.remove(taggedWord); } } } } - if (constituencySize > 10) { - score -= constituencySize * 400; + return tgwlistIndex; + } + + private Double iterateTrees(ArrayList sentenceConstituencyParseList2, ArrayList sentenceConstituencyParseList1, + Double score) { + double preConstituentsScore = score; + ArrayList constituentsMap = new ArrayList(); + int constituencySize = sentenceConstituencyParseList1.size() + sentenceConstituencyParseList2.size(); + for (final Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2) { + int constiRelationsize = 0; + try { + if (sentenceConstituencyParse2 != null && !sentenceConstituencyParse2.isEmpty()) { + for (final Tree sentenceConstituencyParse1 : 
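One caveat in the hunk above: the rewritten retrieveTGWListIndex iterates tgwlistIndex, which starts empty, so the innermost body can never run and the method always returns an empty list; had it run, adding to tgwlistIndex and removing from tGWList mid-iteration would throw ConcurrentModificationException. A corrected sketch of the apparent intent, collecting each distinct POS tag except ":" once, as the pre-diff map-based version did:

    import edu.stanford.nlp.ling.TaggedWord;

    import java.util.ArrayList;
    import java.util.List;

    class TagIndex {
        // Collects every distinct tag except ":" exactly once.
        static ArrayList<String> distinctTags(List<List<TaggedWord>> taggedWordList) {
            ArrayList<String> tags = new ArrayList<>();
            for (List<TaggedWord> sentence : taggedWordList) {
                for (TaggedWord word : sentence) {
                    String tag = word.tag();
                    if (!tag.equals(":") && !tags.contains(tag)) {
                        tags.add(tag);
                    }
                }
            }
            return tags;
        }
    }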
sentenceConstituencyParseList1) { + try { + if (sentenceConstituencyParse1 != null && !sentenceConstituencyParse1.isEmpty()) { + Set constinuent1 = Tdiff.markDiff(sentenceConstituencyParse1, sentenceConstituencyParse2); + Set constinuent2 = Tdiff.markDiff(sentenceConstituencyParse2, sentenceConstituencyParse1); + ArrayList constiLabels = new ArrayList(); + for (final Constituent consti : constinuent1) { + for (final Constituent consti1 : constinuent2) { + if (consti.value().equals(consti1.value()) && !constiLabels.contains(consti.value())) { + constiLabels.add(consti.value()); + constiRelationsize++; + } + } + } + int constituents1 = constinuent1.size() - constiRelationsize; + int constituents2 = constinuent2.size() - constiRelationsize; + constituentsMap.add(constituents1); + constituentsMap.add(constituents2); + constituentsMap.add(constiRelationsize); + if (constituentsMap.size() < 4) { + if ((constituents1 * 5 < constituents2 || constituents2 * 5 < constituents1) && constituents1 > 0 && constituents2 > 0) { + score -= (constituents1 + constituents2) * 200; + //logger.info("score post score -= (constituents1 + constituents2) * 200;: " + score); + } else if ((constituents1 == 0 || constituents2 == 0) && (constituents1 >= constituents2 + 4 || constituents2 >= constituents1 + 4)) { + score -= constituents1 > constituents2 ? constituents1 * 500 : constituents2 * 500; + //logger.info("score post score -= constituents1 > constituents2 ? constituents1 * 500 : constituents2 * 500;: " + score); + } else if (constiRelationsize >= constituents1 + constituents2 && (constituents1 > 2 && constituents2 > 2)) { + score += (constiRelationsize + constituents1 + constituents2) * 350; + //logger.info("score post score += (constiRelationsize + constituents1 + constituents2) * 350;: " + score); + } else if (constituents1 >= 2 && constituents2 >= 2 && constituents1 * 1.5 > constituents2 && constituents2 * 1.5 > constituents1) { + //logger.info("constituents1: " + constituents1); + //logger.info("constituents2: " + constituents2); + if (constituents1 == constituents2 && constiRelationsize - constituents1 >= 5) { + score += (constiRelationsize + constituents1 + constituents2) * 1550; + //logger.info("score post score += (constiRelationsize + constituents1 + constituents2) * 1550;: " + score); + } else if (constituents1 == 3 && constituents2 == 3) { + score += 8949; + //logger.info("score post score += 8949;: " + score); + } else if (constiRelationsize >= constituents1 && constituents1 == constituents2) { + score -= (constiRelationsize + constituents1 + constituents2) * 550; + //logger.info("score post score -= (constiRelationsize + constituents1 + constituents2) * 550;: " + score); + } else if (constiRelationsize < constituents1 && constiRelationsize < constituents2) { + score += 800; + //logger.info("score post score += 800;: " + score); + } else if ((constiRelationsize == constituents1 || constiRelationsize == constituents2) && constituents1 * 1.5 > constituents2 + && constituents2 * 1.5 > constituents1) { + score += (constiRelationsize + constituents1 + constituents2) * 350; + //logger.info("score post score += (constiRelationsize + constituents1 + constituents2) * 350;: " + score); + } + } else if (constiRelationsize > constituents1 + constituents2) { + //logger.info("constiRelationsize: " + constiRelationsize); + //logger.info("constituents1: " + constituents1); + //logger.info("constituents2: " + constituents2); + score -= 2826; + //logger.info("score score score -= 2826; " + score); + } else if 
(constiRelationsize * 5 < constituents1 || constiRelationsize * 5 < constituents2) { + score -= (constituents1 + constituents2) * 400; + //logger.info("score post score -= (constituents1 + constituents2) * 400;: " + score); + } + } else { + score = preConstituentsScore; + //logger.info("score post score = preConstituentsScore;: " + score); + int n1 = constituentsMap.get(0); + int n2 = constituentsMap.get(1); + int n3 = constituentsMap.get(2); + int cap = 0; + if (n1 > n2 && n1 > n3) { + cap = n1; + } else if (n2 > n3 && n2 > n1) { + cap = n2; + } else { + cap = n3; + } + int overheat = 0; + for (int iterator = 3; iterator < constituentsMap.size(); iterator++) { + Integer getConstituent = constituentsMap.get(iterator); + if (getConstituent > cap) { + overheat++; + } + } + //logger.info("cap: " + cap); + //logger.info("overheat: " + overheat); + if (overheat >= 1) { + score -= overheat * 800; + //logger.info("score post score -= overheat * 800;: " + score); + } else { + score -= 553; + //logger.info("score post score -= 553; " + score); + } + } + } + } catch (NoSuchElementException e) { + } + } + if (constituencySize > 10) { + score -= constituencySize * 400; + //logger.info("score post score -= constituencySize * 400;: " + score); + } + } + + } catch (NoSuchElementException e) { + + } } return score; } - private Double typeDependenciesGrammaticalRelation(Collection allTypedDependencies1, Collection allTypedDependencies2, - Double score, ConcurrentMap grammaticalMap1, ConcurrentMap grammaticalMap2, - ConcurrentMap sentenceConstituencyParseList1, ConcurrentMap sentenceConstituencyParseList2) { - ConcurrentMap alltypeDepsSizeMap = new MapMaker().concurrencyLevel(2).makeMap(); - ConcurrentMap summationMap = new MapMaker().concurrencyLevel(2).makeMap(); + private Double typeDependenciesGrammaticalRelation + (Collection allTypedDependencies1, Collection allTypedDependencies2, + Double score, ArrayList grammaticalMap1, + ArrayList grammaticalMap2, + ArrayList sentenceConstituencyParseList1, ArrayList sentenceConstituencyParseList2) { + ArrayList alltypeDepsSize1 = new ArrayList(); + ArrayList summationList = new ArrayList(); int relationApplicable1 = 0; int relationApplicable2 = 0; int grammaticalRelation1 = 0; @@ -236,19 +710,27 @@ public class SentimentAnalyzerTest implements Callable { for (TypedDependency TDY1 : allTypedDependencies1) { IndexedWord dep = TDY1.dep(); IndexedWord gov = TDY1.gov(); - for (GrammaticalStructure gs : grammaticalMap1.values()) { + for (GrammaticalStructure gs : grammaticalMap1) { GrammaticalRelation grammaticalRelation = gs.getGrammaticalRelation(gov, dep); - for (Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2.values()) { - if (grammaticalRelation.isApplicable(sentenceConstituencyParse2) && !treeCollectionGramatical.contains(sentenceConstituencyParse2)) { - score += 700; - grammaticalRelation1++; - treeCollectionGramatical.add(sentenceConstituencyParse2); - } - GrammaticalRelation reln = TDY1.reln(); - if (reln.isApplicable(sentenceConstituencyParse2) && !treeCollectionReln.contains(sentenceConstituencyParse2)) { - score += 525; - relationApplicable1++; - treeCollectionReln.add(sentenceConstituencyParse2); + for (Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2) { + try { + if (sentenceConstituencyParse2 != null && !sentenceConstituencyParse2.isEmpty()) { + if (grammaticalRelation.isApplicable(sentenceConstituencyParse2) && !treeCollectionGramatical.contains(sentenceConstituencyParse2)) { + score += 701; + //logger.info("score post 
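The constituency comparison in iterateTrees diffs the two parse trees in both directions with Tdiff.markDiff, counts labels that occur in both difference sets once each, and then shapes the score from those three counts; the new null/isEmpty guards and NoSuchElementException catches simply skip degenerate parses. The counting core restated:

    import edu.stanford.nlp.trees.Constituent;

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Set;

    class ConstituentOverlap {
        // Distinct constituent labels occurring in both difference sets, as the
        // nested constinuent1/constinuent2 loops above count them.
        static int sharedLabels(Set<Constituent> diff1, Set<Constituent> diff2) {
            List<String> seen = new ArrayList<>();
            int shared = 0;
            for (Constituent c1 : diff1) {
                for (Constituent c2 : diff2) {
                    if (c1.value().equals(c2.value()) && !seen.contains(c1.value())) {
                        seen.add(c1.value());
                        shared++;
                    }
                }
            }
            return shared;
        }
    }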
score += 701; :" + score); + grammaticalRelation1++; + treeCollectionGramatical.add(sentenceConstituencyParse2); + } + GrammaticalRelation reln = TDY1.reln(); + if (reln.isApplicable(sentenceConstituencyParse2) && !treeCollectionReln.contains(sentenceConstituencyParse2)) { + score += 528; + //logger.info("score post score += 528; :" + score); + relationApplicable1++; + treeCollectionReln.add(sentenceConstituencyParse2); + } + } + } catch (NoSuchElementException e) { + } } } @@ -258,26 +740,35 @@ public class SentimentAnalyzerTest implements Callable { for (TypedDependency TDY : allTypedDependencies2) { IndexedWord dep = TDY.dep(); IndexedWord gov = TDY.gov(); - for (GrammaticalStructure gs : grammaticalMap2.values()) { + for (GrammaticalStructure gs : grammaticalMap2) { GrammaticalRelation grammaticalRelation = gs.getGrammaticalRelation(gov, dep); - for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1.values()) { - if (grammaticalRelation.isApplicable(sentenceConstituencyParse1) && !treeCollectionGramatical.contains(sentenceConstituencyParse1)) { - score += 700; - grammaticalRelation2++; - treeCollectionGramatical.add(sentenceConstituencyParse1); - } - GrammaticalRelation reln = TDY.reln(); - //sentenceConstituencyParse1 - if (reln.isApplicable(sentenceConstituencyParse1) && !treeCollectionReln.contains(sentenceConstituencyParse1)) { - score += 525; - relationApplicable2++; - treeCollectionReln.add(sentenceConstituencyParse1); + for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1) { + try { + if (sentenceConstituencyParse1 != null && !sentenceConstituencyParse1.isEmpty()) { + if (grammaticalRelation.isApplicable(sentenceConstituencyParse1) && !treeCollectionGramatical.contains(sentenceConstituencyParse1)) { + score += 700; + //logger.info("score post score += 700; :" + score); + grammaticalRelation2++; + treeCollectionGramatical.add(sentenceConstituencyParse1); + } + GrammaticalRelation reln = TDY.reln(); + //sentenceConstituencyParse1 + if (reln.isApplicable(sentenceConstituencyParse1) && !treeCollectionReln.contains(sentenceConstituencyParse1)) { + score += 527; + //logger.info("score post score += 527; :" + score); + relationApplicable2++; + treeCollectionReln.add(sentenceConstituencyParse1); + } + } + } catch (NoSuchElementException r) { + } } } } if ((grammaticalRelation1 == 0 && grammaticalRelation2 > 4) || (grammaticalRelation2 == 0 && grammaticalRelation1 > 4)) { score -= 3450; + //logger.info("score post score -= 3450; :" + score); } if (!allTypedDependencies1.isEmpty() || !allTypedDependencies2.isEmpty()) { int allTypeDep1 = allTypedDependencies1.size(); @@ -286,71 +777,120 @@ public class SentimentAnalyzerTest implements Callable { if (allTypeDep1 > 0 && allTypeDep2 > 0 && (allTypeDep1 >= 5 || allTypeDep2 >= 5)) { if ((allTypeDep1 + 1 == allTypeDep2 || allTypeDep2 + 1 == allTypeDep1)) { score += allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * 160 : (allTypeDep2 - allTypeDep1) * 160; + //logger.info("score post score += allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * 160 : (allTypeDep2 - allTypeDep1) * 160; :" + score); } else if (allTypeDep1 * 2.5 >= allTypeDep2 && allTypeDep2 * 2.5 >= allTypeDep1) { score += allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * 260 : (allTypeDep2 - allTypeDep1) * 260; + //logger.info("score post score += allTypeDep1 > allTypeDep2 ? 
(allTypeDep1 - allTypeDep2) * 260 : (allTypeDep2 - allTypeDep1) * 260; :" + score); } else if (allTypeDep1 >= 5 && allTypeDep1 <= 10 && allTypeDep2 >= 5 && allTypeDep2 <= 10) { score += allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * 260 : (allTypeDep2 - allTypeDep1) * 260; + //logger.info("score post score += allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * 260 : (allTypeDep2 - allTypeDep1) * 260; :" + score); } else { score -= allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * 600 : (allTypeDep2 - allTypeDep1) * 600; + //logger.info("score post score -= allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * 600 : (allTypeDep2 - allTypeDep1) * 600; :" + score); } - alltypeDepsSizeMap.put(alltypeDepsSizeMap.size() + 1, allTypeDep1); - alltypeDepsSizeMap.put(alltypeDepsSizeMap.size() + 1, allTypeDep2); + alltypeDepsSize1.add(allTypeDep1); + alltypeDepsSize1.add(allTypeDep2); } } if (allTypeDep1 >= 5 && allTypeDep2 >= 5) { int largerTypeDep = allTypeDep1 > allTypeDep2 ? allTypeDep1 : allTypeDep2; int smallerTypeDep = allTypeDep1 < allTypeDep2 ? allTypeDep1 : allTypeDep2; int summation = (largerTypeDep * largerTypeDep) - (smallerTypeDep * smallerTypeDep); + //logger.info("largerTypeDep: " + largerTypeDep); + //logger.info("smallerTypeDep: " + smallerTypeDep); + //logger.info("summation: " + summation); + if (summation >= 35 && summation < 45) { + score -= 4021; + //logger.info("score post score -= 4021;"); + } + if (largerTypeDep == 6 && smallerTypeDep == 6) { + score += 9340; + //logger.info("score post score += 9340;: " + score); + } if (summation / largerTypeDep < 15.0 && summation / largerTypeDep > 10.0 && smallerTypeDep * 2 > largerTypeDep - && !summationMap.values().contains(summation)) { + && !summationList.contains(summation)) { score += summation * 80; - summationMap.put(summationMap.size() + 1, summation); + //logger.info("score post score += summation * 80; :" + score); + summationList.add(summation); } else if (largerTypeDep == smallerTypeDep) { - score += 2500; + score += 2502; + //logger.info("score post score += 2502; :" + score); } } if (relationApplicable1 > 0 && relationApplicable2 > 0 && relationApplicable1 != relationApplicable2) { - score += 600; + score += 601; + //logger.info("score post score += 601; :" + score); } else if (allTypeDep1 * 5 < allTypeDep2 || allTypeDep2 * 5 < allTypeDep1) { score -= allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * (allTypeDep2 * 450) : (allTypeDep2 - allTypeDep1) * (allTypeDep1 * 450); + //logger.info("score post score -= allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * (allTypeDep2 * 450)\n" + + // " : (allTypeDep2 - allTypeDep1) * (allTypeDep1 * 450); :" + score); } if (relationApplicable1 > 1 && relationApplicable2 > 1 && relationApplicable1 * 3 > relationApplicable2 && relationApplicable2 * 3 > relationApplicable1) { score += relationApplicable1 > relationApplicable2 ? (relationApplicable1 - relationApplicable2) * 1500 : (relationApplicable2 - relationApplicable1) * 1500; + //logger.info("score post score += relationApplicable1 > relationApplicable2 ? (relationApplicable1 - relationApplicable2) * 1500\n" + + // " : (relationApplicable2 - relationApplicable1) * 1500; :" + score); } else if (relationApplicable1 * 5 < relationApplicable2 || relationApplicable2 * 5 < relationApplicable1) { score -= relationApplicable1 > relationApplicable2 ? 
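The summation used above is a difference of squares of the two typed-dependency counts: counts of 7 and 5 give 49 - 25 = 24, for example, and equal counts always give 0, which is why the largerTypeDep == smallerTypeDep case is scored in its own branch. Restated:

    class DependencySummation {
        // Difference of squares of the larger and smaller dependency counts.
        static int summation(int allTypeDep1, int allTypeDep2) {
            int larger = Math.max(allTypeDep1, allTypeDep2);
            int smaller = Math.min(allTypeDep1, allTypeDep2);
            return larger * larger - smaller * smaller;
        }
    }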
(relationApplicable1 - relationApplicable2) * 500 : (relationApplicable2 - relationApplicable1) * 500; + //logger.info("score post score -= relationApplicable1 > relationApplicable2 ? (relationApplicable1 - relationApplicable2) * 500\n" + + // " : (relationApplicable2 - relationApplicable1) * 500; :" + score); } if (grammaticalRelation1 > 0 && grammaticalRelation2 > 0 && grammaticalRelation1 * 3 > grammaticalRelation2 && grammaticalRelation2 * 3 > grammaticalRelation1) { score += grammaticalRelation1 > grammaticalRelation2 ? (grammaticalRelation1 - grammaticalRelation2) * 1500 : (grammaticalRelation2 - grammaticalRelation1) * 1500; + //logger.info("score post score += grammaticalRelation1 > grammaticalRelation2 ? (grammaticalRelation1 - grammaticalRelation2) * 1500\n" + + //" : (grammaticalRelation2 - grammaticalRelation1) * 1500; :" + score); } else if (grammaticalRelation1 * 5 < grammaticalRelation2 || grammaticalRelation2 * 5 < grammaticalRelation1) { score -= grammaticalRelation1 > grammaticalRelation2 ? (grammaticalRelation1 - grammaticalRelation2) * 500 - : (grammaticalRelation2 - grammaticalRelation1) * 500; + : (grammaticalRelation2 - grammaticalRelation1) * 6500; + //logger.info("score post score -= grammaticalRelation1 > grammaticalRelation2 ? (grammaticalRelation1 - grammaticalRelation2) * 500\n" + + // " : (grammaticalRelation2 - grammaticalRelation1) * 500; :" + score); } } - ConcurrentMap filerTreeContent = new MapMaker().concurrencyLevel(2).makeMap(); - AtomicInteger runCount1 = new AtomicInteger(0); - for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1.values()) { - for (Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2.values()) { - sentenceConstituencyParse1.taggedLabeledYield().forEach((LBW) -> { - sentenceConstituencyParse2.taggedLabeledYield().stream().filter((LBW1) -> (LBW.lemma().equals(LBW1.lemma()) - && !filerTreeContent.values().contains(LBW.lemma()))).map((_item) -> { - filerTreeContent.put(filerTreeContent.size() + 1, LBW.lemma()); - return _item; - }).forEachOrdered((_item) -> { - runCount1.getAndIncrement(); - }); - }); + ArrayList filerTreeContent = new ArrayList(); + int runCount1 = 0; + for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1) { + try { + if (sentenceConstituencyParse1 != null && !sentenceConstituencyParse1.isEmpty()) { + for (Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2) { + try { + if (sentenceConstituencyParse2 != null && !sentenceConstituencyParse2.isEmpty()) { + for (CoreLabel LBW : sentenceConstituencyParse1.taggedLabeledYield()) { + for (CoreLabel LBW1 : sentenceConstituencyParse2.taggedLabeledYield()) { + if (LBW.lemma().equals(LBW1.lemma())) { + boolean found = false; + for (String str : filerTreeContent) { + if (str.equals(LBW.lemma())) { + found = true; + break; + } + } + if (!found) { + filerTreeContent.add(LBW.lemma()); + runCount1++; + } + } + } + } + } + } catch (NoSuchElementException e) { + + } + } + } + } catch (NoSuchElementException e) { + } } - score += runCount1.get() * 250; + score += runCount1 * 250; + //logger.info("score post score += runCount1 * 250; :" + score); int typeSizeSmallest = 100; int typeSizeLargest = 0; - for (Integer i : alltypeDepsSizeMap.values()) { + for (Integer i : alltypeDepsSize1) { if (i > typeSizeLargest) { typeSizeLargest = i; } @@ -360,10 +900,11 @@ public class SentimentAnalyzerTest implements Callable { } if (typeSizeLargest >= typeSizeSmallest * 3) { score -= typeSizeLargest * 160; + //logger.info("score post score -= 
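
The hunks above repeat one pattern: for each typed dependency of one sentence, test whether its GrammaticalRelation is applicable to a constituency parse of the other sentence, and award a bonus once per tree. A minimal standalone sketch of that pattern, assuming only the Stanford CoreNLP types already used here (TypedDependency, GrammaticalRelation, Tree); the class name, method name and bonus parameter are illustrative:

import edu.stanford.nlp.trees.GrammaticalRelation;
import edu.stanford.nlp.trees.Tree;
import edu.stanford.nlp.trees.TypedDependency;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

final class RelationOverlapSketch {

    // Awards perTreeBonus once per parse tree whose structure the dependency's
    // relation can apply to, mirroring the score += 700 / score += 527 branches above.
    static double scoreApplicableRelations(Collection<TypedDependency> typedDeps,
                                           List<Tree> otherParses,
                                           double perTreeBonus) {
        double score = 0.0;
        ArrayList<Tree> seen = new ArrayList<>(); // each tree is rewarded only once
        for (TypedDependency td : typedDeps) {
            GrammaticalRelation reln = td.reln();
            for (Tree parse : otherParses) {
                if (parse != null && !parse.isEmpty()
                        && reln.isApplicable(parse) && !seen.contains(parse)) {
                    seen.add(parse);
                    score += perTreeBonus;
                }
            }
        }
        return score;
    }
}
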
typeSizeLargest * 160; :" + score); } typeSizeLargest = 0; typeSizeSmallest = 100; - for (int i : summationMap.values()) { + for (int i : summationList) { if (i > typeSizeLargest) { typeSizeLargest = i; } @@ -373,58 +914,63 @@ public class SentimentAnalyzerTest implements Callable { } if (typeSizeLargest >= typeSizeSmallest * 3) { score -= typeSizeLargest * 160; + //logger.info("score post score -= typeSizeLargest * 160; :" + score); } return score; } - private Double simpleRNNMatrixCalculations(Double score, ConcurrentMap simpleSMXlist1, ConcurrentMap simpleSMXlist2) { + private Double simpleRNNMatrixCalculations(Double score, ArrayList simpleSMXlist1, + ArrayList simpleSMXlist2) { List iteratedDoubleList = new ArrayList(); List iterateddotPredictions = new ArrayList(); double dotpredictionTransfer = 0.0; int iterationOverHeat = 0; double scoreFallback = score; - for (SimpleMatrix simpleSMX2 : simpleSMXlist2.values()) { - ConcurrentMap AccumulateDotMap = new MapMaker().concurrencyLevel(2).makeMap(); - ConcurrentMap subtractorMap = new MapMaker().concurrencyLevel(2).makeMap(); - ConcurrentMap dotPredictions = new MapMaker().concurrencyLevel(2).makeMap(); - ConcurrentMap DotOverTransfer = new MapMaker().concurrencyLevel(2).makeMap(); + for (SimpleMatrix simpleSMX2 : simpleSMXlist2) { + ArrayList AccumulateDotList = new ArrayList<>(); + ArrayList subtractorList = new ArrayList(); + ArrayList dotPredictions = new ArrayList(); + ArrayList DotOverTransfer = new ArrayList(); Double totalSubtraction = 0.0; Double largest = 10.0; Double shortest = 100.0; - for (SimpleMatrix simpleSMX1 : simpleSMXlist1.values()) { + for (SimpleMatrix simpleSMX1 : simpleSMXlist1) { double dotPrediction2 = simpleSMX2.dot(simpleSMX1) * 100; double dotPrediction1 = simpleSMX1.dot(simpleSMX2) * 100; - AccumulateDotMap.put(AccumulateDotMap.size() + 1, dotPrediction1); - AccumulateDotMap.put(AccumulateDotMap.size() + 1, dotPrediction2); + AccumulateDotList.add(dotPrediction1); + AccumulateDotList.add(dotPrediction2); double subtracter1 = dotPrediction1 > 50 ? dotPrediction1 - 100 : dotPrediction1 > 0 ? 100 - dotPrediction1 : 0; double subtracter2 = dotPrediction2 > 50 ? dotPrediction2 - 100 : dotPrediction2 > 0 ? 
100 - dotPrediction2 : 0; - subtractorMap.put(subtractorMap.size() + 1, subtracter1); - subtractorMap.put(subtractorMap.size() + 1, subtracter2); + subtractorList.add(subtracter1); + subtractorList.add(subtracter2); dotpredictionTransfer = dotPrediction1; - if (!dotPredictions.values().contains(dotPrediction1)) { - for (Double transferDots : DotOverTransfer.values()) { + if (!dotPredictions.contains(dotPrediction1)) { + for (Double transferDots : DotOverTransfer) { if (transferDots == dotPrediction1) { totalSubtraction += transferDots; } else { score -= subtracter1 * 25; + //logger.info("score score -= subtracter1 * 25; : " + score); } } - DotOverTransfer.put(DotOverTransfer.size(), dotPrediction1); + DotOverTransfer.add(dotPrediction1); } else { subtracter1 -= 100; subtracter1 *= 25; score += subtracter1 * dotPrediction1; + //logger.info("score score += subtracter1 * dotPrediction1; : " + score); } - dotPredictions.put(dotPredictions.size() + 1, dotPrediction1); - if (!dotPredictions.values().contains(dotPrediction2)) { - for (Double transferDots : DotOverTransfer.values()) { + dotPredictions.add(dotPrediction1); + if (!dotPredictions.contains(dotPrediction2)) { + for (Double transferDots : DotOverTransfer) { if (transferDots == dotPrediction2) { totalSubtraction += transferDots; } else { score -= subtracter1 * 25; + //logger.info("score score -= subtracter1 * 25; : " + score); } } - DotOverTransfer.put(DotOverTransfer.size(), dotPrediction2); + DotOverTransfer.add(dotPrediction2); if (dotPrediction2 > largest) { largest = dotPrediction2; } @@ -434,97 +980,124 @@ public class SentimentAnalyzerTest implements Callable { Double dotPredictionIntervalDifference = largest - shortest; subtracter2 *= 25; if (dotPredictionIntervalDifference < 5.0) { - if (dotPredictions.values().size() > 0) { + if (dotPredictions.size() > 0) { if (subtracter2 > 0) { score -= subtracter2; + //logger.info("score score -= subtracter2; : " + score); } else { score += subtracter2; + //logger.info("score score += subtracter2; : " + score); } } } else { score -= subtracter2 / 10; + //logger.info("score score -= subtracter2 / 10; : " + score); } } else { if (subtracter2 > 55.0 && subtracter2 < 82.0) { if (dotPrediction2 > 30 && dotPrediction2 < 60) { if (dotpredictionTransfer != 0.0 && (subtracter2 / dotPrediction2 < 1.3 || (subtracter2 / dotPrediction2 > 1.9 && subtracter2 / dotPrediction2 < 1.99))) { + //logger.info("subtracter2: " + subtracter2); + //logger.info("dotpredictionTransfer: " + dotpredictionTransfer); + //logger.info("dotPrediction2: " + dotPrediction2); if (subtracter2 / dotPrediction2 < 1.248 && subtracter2 / dotPrediction2 > 1.238) { - score -= 2500; - } else { - score += 4500; + score -= 2502; + //logger.info("score score -= 2502; : " + score); + } else if (subtracter2 > 55.5) { + score -= 7530; + //logger.info("score score -= 7530; : " + score); } } else if (dotpredictionTransfer != 0.0 && subtracter2 / dotPrediction2 > 1.6 && subtracter2 / dotPrediction2 < 1.95) { if (subtracter2 > 61.1 && subtracter2 < 61.9 && dotPrediction2 > 37.5 && dotPrediction2 < 38.2) { score += 4500; + //logger.info("score score += 4500; : " + score); } else { score -= 4500; + //logger.info("score score -= 4500; : " + score); } } else if (!iterateddotPredictions.contains(dotPrediction2)) { - score -= subtracter2 * dotPrediction2; // += + score += subtracter2 * dotPrediction2; // += + //logger.info("score score += subtracter2 * dotPrediction2; : " + score); iterateddotPredictions.add(dotPrediction2); } else { score -= 550; + 
//logger.info("score score -= 550; : " + score); } } else if (dotPrediction2 < 25 && subtracter2 < 75) { score -= dotPrediction2 * 250; + //logger.info("score score -= dotPrediction2 * 250; : " + score); } else if (subtracter2 > 75.0) { if (dotPrediction2 > 20.0 && dotPrediction2 < 23.0) { score += 3500; + //logger.info("score score += 3500; : " + score); } else { if (subtracter2 < 75.9 && dotPrediction2 > 24.0 && dotPrediction2 < 24.9) { score += (subtracter2 * dotPrediction2) * 5; + //logger.info("score score += (subtracter2 * dotPrediction2) * 5; : " + score); } else { score -= 4500; + //logger.info("score score -= 4500; : " + score); } } } } } - dotPredictions.put(dotPredictions.size() + 1, dotPrediction2); + dotPredictions.add(dotPrediction2); iterationOverHeat++; } Double subTracPre = 0.0; - for (Double subtractors : subtractorMap.values()) { + for (Double subtractors : subtractorList) { if (Objects.equals(subTracPre, subtractors)) { if (subTracPre > 43.5 && subTracPre < 50.0) { - score += (subTracPre * 15) / subtractorMap.values().size(); + score += (subTracPre * 15) / subtractorList.size(); + //logger.info("score score += (subTracPre * 15) / subtractorList.size(); : " + score); } else if (subTracPre > 60.0 && subTracPre < 66.5) { - score += (subTracPre * 15) / subtractorMap.values().size(); + score += (subTracPre * 15) / subtractorList.size(); + //logger.info("score score += (subTracPre * 15) / subtractorList.size(); : " + score); } } else if (subTracPre > 75.0 && subTracPre < 90.0) { - score += (subTracPre * 50) / subtractorMap.values().size(); + score += (subTracPre * 50) / subtractorList.size(); + //logger.info("score score += (subTracPre * 50) / subtractorList.size(); : " + score); } else if (subTracPre >= 55.0) { score -= 2800; + //logger.info("score score -= 2800; : " + score); } else if (subTracPre < -25.0 && subTracPre > -45.0) { score += subTracPre * 100; + //logger.info("score score += subTracPre * 100; : " + score); } subTracPre = subtractors; } + //logger.info("totalSubtraction: " + totalSubtraction); if (totalSubtraction > 45.0) { score -= totalSubtraction * 25; + //logger.info("score score -= totalSubtraction * 25; : " + score); } else { score += totalSubtraction * 25; + //logger.info("score score += totalSubtraction * 25; : " + score); } Double preAccumulatorDot = 0.0; Double postAccumulatorDot = 0.0; - for (Double accumulators : AccumulateDotMap.values()) { + for (Double accumulators : AccumulateDotList) { if (Objects.equals(preAccumulatorDot, accumulators)) { if (Objects.equals(postAccumulatorDot, accumulators)) { score -= 1400; + //logger.info("score score -= 1400; : " + score); } postAccumulatorDot = accumulators; } preAccumulatorDot = accumulators; } subTracPre = 0.0; - for (Double subtractors : subtractorMap.values()) { + for (Double subtractors : subtractorList) { if (Objects.equals(subTracPre, subtractors) && subTracPre != 0.0) { if (!iteratedDoubleList.contains(subTracPre)) { score += 500; + //logger.info("score score += 500; : " + score); iteratedDoubleList.add(subTracPre); } else { score -= 150; + //logger.info("score score -= 150; : " + score); } } subTracPre = subtractors; @@ -532,21 +1105,23 @@ public class SentimentAnalyzerTest implements Callable { } if (iterationOverHeat > 12) { score = scoreFallback; - score -= 2500; + score -= 2501; + //logger.info("score score -= 2501; : " + score); } return score; } - private Double simpleRNNMaxtrixVectors(Double score, ConcurrentMap simpleSMXlistVector1, ConcurrentMap simpleSMXlistVector2) { - ConcurrentMap 
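
simpleRNNMatrixCalculations above boils down to pairwise dot products between the two sentences' RNN prediction vectors, scaled to a 0..100 range, with a "subtracter" measuring the distance from a confident 0 or 100. A reduced sketch of just that kernel, assuming org.ejml.simple.SimpleMatrix as used in this class; the class name, method names and the flat penalty are illustrative, not the patch's exact weights:

import org.ejml.simple.SimpleMatrix;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;

final class DotPredictionSketch {

    // Distance of the scaled dot product from the nearest confident extreme (0 or 100).
    static double subtracter(double dotPct) {
        return dotPct > 50 ? dotPct - 100 : dotPct > 0 ? 100 - dotPct : 0;
    }

    static double score(ArrayList<SimpleMatrix> predictions1,
                        ArrayList<SimpleMatrix> predictions2) {
        double score = 0.0;
        Set<Double> seen = new HashSet<>(); // repeated dot values are penalised
        for (SimpleMatrix m2 : predictions2) {
            for (SimpleMatrix m1 : predictions1) {
                double dotPct = m1.dot(m2) * 100;
                if (!seen.add(dotPct)) {
                    score -= subtracter(dotPct) * 25;
                }
            }
        }
        return score;
    }
}
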
elementSumCounter = new MapMaker().concurrencyLevel(3).makeMap(); - ConcurrentMap dotMap = new MapMaker().concurrencyLevel(3).makeMap(); - ConcurrentMap elementSumMap = new MapMaker().concurrencyLevel(3).makeMap(); - ConcurrentMap dotSumMap = new MapMaker().concurrencyLevel(3).makeMap(); + private Double simpleRNNMaxtrixVectors(Double score, ArrayList simpleSMXlistVector1, + ArrayList simpleSMXlistVector2) { + ArrayList elementSumCounter = new ArrayList<>(); + ArrayList dotMap = new ArrayList<>(); + ArrayList elementSumMap = new ArrayList<>(); + ArrayList dotSumMap = new ArrayList<>(); Double preDot = 0.0; Double postDot = 0.0; - int iterateSize = simpleSMXlistVector1.values().size() + simpleSMXlistVector2.values().size(); - for (SimpleMatrix simpleSMX2 : simpleSMXlistVector2.values()) { - for (SimpleMatrix simpleSMX1 : simpleSMXlistVector1.values()) { + int iterateSize = simpleSMXlistVector1.size() + simpleSMXlistVector2.size(); + for (SimpleMatrix simpleSMX2 : simpleSMXlistVector2) { + for (SimpleMatrix simpleSMX1 : simpleSMXlistVector1) { double dot2 = simpleSMX2.dot(simpleSMX1); double elementSum2 = simpleSMX2.kron(simpleSMX1).elementSum(); double dot1 = simpleSMX1.dot(simpleSMX2); @@ -554,90 +1129,135 @@ public class SentimentAnalyzerTest implements Callable { if (preDot == dot2) { if (postDot == dot2) { score -= 500; + //logger.info("score score -= 500: " + score); } postDot = dot2; } if (preDot == dot1) { if (postDot == dot1) { score -= 500; + //logger.info("score score -= 500 1: " + score); } postDot = dot1; } preDot = dot1; elementSum1 = Math.round(elementSum1 * 100.0) / 100.0; - elementSumCounter.put(elementSumCounter.size() + 1, elementSum1); - dotMap.put(dotMap.size() + 1, dot1); + elementSumCounter.add(elementSum1); + dotMap.add(dot1); preDot = dot2; elementSum2 = Math.round(elementSum2 * 100.0) / 100.0; - elementSumCounter.put(elementSumCounter.size() + 1, elementSum2); - dotMap.put(dotMap.size() + 1, dot2); - if (!dotSumMap.values().contains(dot1)) { + elementSumCounter.add(elementSum2); + dotMap.add(dot2); + if (!dotSumMap.contains(dot1)) { if (dot1 < 0.1 && dot1 > 0.050) { score += 256; + //logger.info("score score += 256; " + score); } if (dot1 > 0.44 && dot1 < 0.60) { score -= 2400; + //logger.info("score score -= 2400; " + score); } else if (dot1 > 0.40 && dot1 < 0.445) { score += 3600; + //logger.info("score score += 3600; " + score); } - dotSumMap.put(dotSumMap.size() + 1, dot1); + dotSumMap.add(dot1); } else { score -= 50; + //logger.info("score score -= 50; " + score); } - if (!elementSumMap.values().contains(elementSum1)) { + if (!elementSumMap.contains(elementSum1)) { + //logger.info("elementSum1: " + elementSum1); + if (elementSum1 > 90.0 && elementSum1 < 95.0) { + score -= 7678; + //logger.info("score post score -= 7678;"); + } + if (elementSum1 > 80.0 && elementSum1 < 85.0) { + score -= 4521; + //logger.info("score post score -= 4521;: " + score); + } + if (elementSum1 > 125.0 && elementSum1 < 130.0) { + score += elementSum1 * 43; + //logger.info("score post score += elementSum1 * 43; : " + score); + } if (elementSum1 < 0.01 && elementSum1 > 0.00) { score += 1300; + //logger.info("score score += 1300; " + score); } else if (elementSum1 > 0.1 && elementSum1 < 1.0) { score += 1100; - } else { + //logger.info("score score += 1100; " + score); + } else if (elementSum1 > 1.0 && elementSum1 < 10.0) { score -= elementSum1 * 1024; + //logger.info("score score -= elementSum1 * 1024; " + score); + } else if (dot2 == elementSum2) { + score += 2501; + //logger.info("score 
score += 2501; " + score); } - elementSumMap.put(elementSumMap.size() + 1, elementSum1); + elementSumMap.add(elementSum1); } else { score -= 50; + //logger.info("score score -= 50; " + score); } - if (!dotSumMap.values().contains(dot2)) { + //logger.info("dot2: " + dot2); + if (!dotSumMap.contains(dot2)) { if (dot2 < 0.000) { score += dot2 * 500; + //logger.info("score score += dot2 * 500; " + score); } else if (dot2 < 0.1) { score += 256; + //logger.info("score score += 256; " + score); } if (dot2 > 0.50) { score -= 1200; + //logger.info("score score -= 1200; " + score); } - dotSumMap.put(dotSumMap.size() + 1, dot2); + dotSumMap.add(dot2); } else if (dot2 > 0.050 && dot2 < 0.10) { score -= 350; - } else { - score = score > 0 ? score - dot2 * 1200 : score + dot2 * 1200; + //logger.info("score score -= 350; " + score); + } else if (dot2 > 1.0 && dot2 < 10.0) { + score = score > 0 ? score - dot2 * 1200 : score + dot2 * 2200; + //logger.info("score score = score > 0 ? score - dot2 * 1200 : score + dot2 * 2200; " + score); + } else if (dot1 != elementSum1 && dot2 * 3 > elementSum1 && elementSum1 * 3 > dot2) { + score -= 6556; + //logger.info("score score -= 6556; " + score); } - if (!elementSumMap.values().contains(elementSum2)) { + if (!elementSumMap.contains(elementSum2)) { if (elementSum2 < 0.01 && elementSum2 > 0.00) { score += 3300; + //logger.info("score score += 3300; " + score); } else if (elementSum2 > 0.1 && elementSum2 < 0.2) { score += 1100; + //logger.info("score score += 1100; " + score); } else { score -= elementSum2 * 1024; + //logger.info("score score -= elementSum2 * 1024; " + score); } - elementSumMap.put(elementSumMap.size() + 1, elementSum2); + elementSumMap.add(elementSum2); } else if (elementSum2 > 0.050 && elementSum2 < 0.10) { score += 750; - } else { - score = score > 0 ? 
score - elementSum2 * 1200 : score + elementSum2 * 1200; + //logger.info("score score += 750; " + score); + } else if (elementSum2 > 1.0 && elementSum2 < 10.0) { + score -= 867 * elementSum2; + //logger.info("score score -= 867 * elementSum2; " + score); + } else if (dot2 != elementSum2 && dot2 * 3 < elementSum1 && elementSum1 * 3 < dot2) { + score -= 7501; + //logger.info("score post score -= 7501;" + score); } } } if (iterateSize > 5) { score -= iterateSize * 400; + //logger.info("score score -= iterateSize * 400; " + score); } score = elementsAndDotsRelation(score, dotMap, elementSumCounter); + //logger.info("score score = elementsAndDotsRelation(score, dotMap, elementSumCounter); " + score); return score; } - private Double elementsAndDotsRelation(Double score, ConcurrentMap dotMap, ConcurrentMap elementSumCounter) { - OptionalDouble minvalueDots = dotMap.values().stream().mapToDouble(Double::doubleValue).min(); - OptionalDouble maxvalueDots = dotMap.values().stream().mapToDouble(Double::doubleValue).max(); - double total = minvalueDots.getAsDouble() + maxvalueDots.getAsDouble(); + private Double elementsAndDotsRelation(Double + score, ArrayList dotMap, ArrayList elementSumCounter) { + OptionalDouble minvalueDots = dotMap.stream().mapToDouble(Double::doubleValue).min(); + OptionalDouble maxvalueDots = dotMap.stream().mapToDouble(Double::doubleValue).max(); boolean permitted = false; if (minvalueDots.getAsDouble() != maxvalueDots.getAsDouble()) { permitted = true; @@ -655,8 +1275,8 @@ public class SentimentAnalyzerTest implements Callable { score -= 3500; } } - OptionalDouble minvalueElements = elementSumCounter.values().stream().mapToDouble(Double::doubleValue).min(); - OptionalDouble maxvalueElements = elementSumCounter.values().stream().mapToDouble(Double::doubleValue).max(); + OptionalDouble minvalueElements = elementSumCounter.stream().mapToDouble(Double::doubleValue).min(); + OptionalDouble maxvalueElements = elementSumCounter.stream().mapToDouble(Double::doubleValue).max(); Double elementsVariance = maxvalueElements.getAsDouble() - minvalueElements.getAsDouble(); if (elementsVariance != 0.0) { if (elementsVariance <= 0.01 && maxvalueElements.getAsDouble() <= 0.02) { @@ -681,7 +1301,8 @@ public class SentimentAnalyzerTest implements Callable { return score; } - private Double sentimentMatrixVariances(Double score, int longest1, int longest2, int mainSentiment1, int mainSentiment2) { + private Double sentimentMatrixVariances(Double score, int longest1, int longest2, int mainSentiment1, + int mainSentiment2) { if (longest1 != longest2) { long deffLongest = longest1 > longest2 ? longest1 : longest2; long deffshorter = longest1 < longest2 ? longest1 : longest2; @@ -717,28 +1338,16 @@ public class SentimentAnalyzerTest implements Callable { return score; } - private final Map.Entry> classifyRawEvaluation(Double score, SentimentValueCache cacheSentimentLocal1, - SentimentValueCache cacheSentimentLocal2) { - if (cacheSentiment1 == null || cacheSentiment2 == null) { - DocumentReaderAndWriter readerAndWriter = classifier.makePlainTextReaderAndWriter(); - if (cacheSentiment1 == null) { - cacheSentimentLocal1.setClassifyRaw(classifier.classifyRaw(str, readerAndWriter)); - } - if (cacheSentiment2 == null) { - cacheSentimentLocal2.setClassifyRaw(classifier.classifyRaw(str1, readerAndWriter)); - } - } - final List classifyRaw1 = cacheSentiment1 == null ? cacheSentimentLocal1.getClassifyRaw() : cacheSentiment1.getClassifyRaw(); - final List classifyRaw2 = cacheSentiment2 == null ? 
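
The node-vector comparison above uses two signals per pair of vectors: the plain dot product and the element sum of the Kronecker product, rounded to two decimals before it is bucketed. The rounding idiom isolated, with SimpleMatrix from EJML as above; the wrapper class name is assumed:

import org.ejml.simple.SimpleMatrix;

final class NodeVectorSignalsSketch {

    // kron() forms the outer product of the two node vectors; elementSum()
    // collapses it to a scalar, rounded to two decimals exactly as in the loop above.
    static double roundedKronSum(SimpleMatrix a, SimpleMatrix b) {
        double elementSum = a.kron(b).elementSum();
        return Math.round(elementSum * 100.0) / 100.0;
    }
}
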
cacheSentimentLocal2.getClassifyRaw() : cacheSentiment2.getClassifyRaw(); - score -= (classifyRaw1.size() > classifyRaw2.size() ? classifyRaw1.size() - classifyRaw2.size() : classifyRaw2.size() - classifyRaw1.size()) * 200; - Map.Entry< Double, Map.Entry> entry - = new AbstractMap.SimpleEntry(score, new AbstractMap.SimpleEntry(cacheSentimentLocal1, cacheSentimentLocal2)); - return entry; + private int classifyRawEvaluation() { + final List classifyRaw1 = this.listF; + final List classifyRaw2 = this.list1; + return (classifyRaw1.size() > classifyRaw2.size() ? classifyRaw1.size() - classifyRaw2.size() : classifyRaw2.size() - classifyRaw1.size()) * 200; } - private Double entryCountsRelation(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { - int entry1 = cacheSentiment1 == null ? cacheSentimentLocal1.getEntryCounts().values().size() : cacheSentiment1.getEntryCounts().values().size(); - int entry2 = cacheSentiment2 == null ? cacheSentimentLocal2.getEntryCounts().values().size() : cacheSentiment2.getEntryCounts().values().size(); + private Double entryCountsRelation(Double + score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { + int entry1 = cacheSentimentLocal1.size(); + int entry2 = cacheSentimentLocal2.size(); if (entry1 > 0 && entry2 > 0) { if ((entry1 >= entry2 * 5) || (entry2 >= entry1 * 5)) { score -= entry1 > entry2 ? (entry1 - entry2) * 450 : (entry2 - entry1) * 450; @@ -747,7 +1356,7 @@ public class SentimentAnalyzerTest implements Callable { } else if ((entry1 >= entry2 * 2 || entry2 >= entry1 * 2) && entry1 * 2 >= entry2 && entry2 * 2 >= entry1) { score -= entry1 > entry2 ? (entry1 - entry2) * 450 : (entry2 - entry1) * 450; } else if (entry1 * 3 >= entry2 && entry2 * 3 >= entry1) { - score += entry1 > entry2 ? (entry1 - entry2) * 550 : (entry2 - entry1) * 550; + score -= entry1 > entry2 ? (entry1 - entry2) * 550 : (entry2 - entry1) * 550; } else if (entry1 > 10 && entry2 > 10 && entry1 * 2 > entry2 && entry2 * 2 > entry1) { score += entry1 > entry2 ? 
entry2 * 600 : entry1 * 600; } @@ -755,104 +1364,116 @@ public class SentimentAnalyzerTest implements Callable { return score; } - private SentimentValueCache GrammaticStructureSetup(SentimentValueCache cacheSentimentLocal, Annotation pipelineAnnotation) { - for (CoreMap sentence : pipelineAnnotation.get(CoreAnnotations.SentencesAnnotation.class)) { - Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class); - cacheSentimentLocal.addSentenceConstituencyParse(sentenceConstituencyParse); - GrammaticalStructure gs = gsf.newGrammaticalStructure(sentenceConstituencyParse); - cacheSentimentLocal.addTypedDependencies(gs.allTypedDependencies()); - cacheSentimentLocal.addGS(gs); + private ArrayList grammaticalStructureAllTypedDependencies( + ArrayList grammaticalStructures) { + ArrayList typedDependenciesArr = new ArrayList<>(); + for (GrammaticalStructure gs : grammaticalStructures) { + Collection typedDependencies = gs.allTypedDependencies(); + typedDependenciesArr.addAll(typedDependencies); } - return cacheSentimentLocal; + return typedDependenciesArr; } - private SentimentValueCache initializeCacheSetup(String str, SentimentValueCache cacheSentimentLocal) { - cacheSentimentLocal = new SentimentValueCache(str); - cacheSentimentLocal.setTaggedwords(getTaggedWordList(str)); - cacheSentimentLocal.setCounter(tokenizeCounting(cacheSentimentLocal.getTaggedwordlist())); - return cacheSentimentLocal; + private ArrayList grammaticalStructureSetup(ArrayList trees) { + ArrayList grammaticalStructures = new ArrayList(); + for (Tree tree : trees) { + try { + if (!tree.isEmpty()) { + GrammaticalStructure gs = gsf.newGrammaticalStructure(tree); + grammaticalStructures.add(gs); + } + } catch (NoSuchElementException e) { + + } + } + return grammaticalStructures; } - private SentimentValueCache sentimentCoreAnnotationSetup(Annotation pipelineAnnotationSentiment, SentimentValueCache cacheSentimentLocal) { - for (CoreMap sentence : pipelineAnnotationSentiment.get(CoreAnnotations.SentencesAnnotation.class)) { - Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class); + private ArrayList retrieveTrees(List sentences) { + ArrayList treeList = new ArrayList(); + for (CoreMap sentence : sentences) { + Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class); + treeList.add(sentenceConstituencyParse); + } + return treeList; + } + + private ArrayList sentimentRNNCorePredicted(List sentences, + Class sentimentAnnotatedTreeClass) { + ArrayList rnnCoreAnnotationsPrediction = new ArrayList<>(); + for (CoreMap sentence : sentences) { + Tree tree = sentence.get(sentimentAnnotatedTreeClass); + if (tree != null) { + SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree); + rnnCoreAnnotationsPrediction.add(predictions); + } + } + return rnnCoreAnnotationsPrediction; + } + + private ArrayList sentimentRNNCoreNodevectors(List sentences, + Class sentimentAnnotatedTreeClass) { + ArrayList rnnCoreAnnotationsNodevectors = new ArrayList<>(); + for (CoreMap sentence : sentences) { + Tree tree = sentence.get(sentimentAnnotatedTreeClass); + if (tree != null) { + SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree); + rnnCoreAnnotationsNodevectors.add(nodeVector); + } + } + return rnnCoreAnnotationsNodevectors; + } + + private ArrayList sentimentRNNCoreAnnotations(List sentences, + Class sentimentAnnotatedTreeClass) { + ArrayList rnnCoreAnnotationsPredicted = new ArrayList<>(); + for (CoreMap sentence : sentences) { + Tree tree = 
sentence.get(sentimentAnnotatedTreeClass); if (tree != null) { int predictedClass = RNNCoreAnnotations.getPredictedClass(tree); - SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree); - SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree); - cacheSentimentLocal.addRNNPredictClass(predictedClass); - cacheSentimentLocal.addSimpleMatrix(predictions); - cacheSentimentLocal.addSimpleMatrixVector(nodeVector); + rnnCoreAnnotationsPredicted.add(predictedClass); } } - return cacheSentimentLocal; + return rnnCoreAnnotationsPredicted; } - private SentimentValueCache setupMainSentimentandLongestVal(Annotation pipelineAnnotationSentiment, SentimentValueCache cacheSentimentLocal) { - for (CoreMap sentence : pipelineAnnotationSentiment.get(CoreAnnotations.SentencesAnnotation.class)) { - Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class); + private int setupMainSentiment(List sentences4, + Class sentimentAnnotatedTreeClass) { + int longest = 0; + int longestSentiment = 0; + for (CoreMap sentence : sentences4) { + Tree tree = sentence.get(sentimentAnnotatedTreeClass); int sentiment = RNNCoreAnnotations.getPredictedClass(tree); String partText = sentence.toString(); - //SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree); - if (partText.length() > cacheSentimentLocal.getLongest()) { - cacheSentimentLocal.setMainSentiment(sentiment); - cacheSentimentLocal.setLongest(partText.length()); + if (partText.length() > longest) { + longestSentiment = sentiment; + longest = partText.length(); } } - return cacheSentimentLocal; + return longestSentiment; } - private SentimentValueCache jmweAnnotationSetup(Annotation jmweStrAnnotation, SentimentValueCache cacheSentimentLocal) { - List sentences = jmweStrAnnotation.get(CoreAnnotations.SentencesAnnotation.class); - Collection> tokeninflectionMap = new ArrayList(); - int tokenadder = 0; + private int setupMainLongest(List sentences) { + int longest = 0; for (CoreMap sentence : sentences) { - for (IMWE token : sentence.get(JMWEAnnotation.class)) { - if (token.isInflected()) { - cacheSentimentLocal.setInflectedCounterPositive(cacheSentimentLocal.getInflectedCounterPositive() + 1); - } else if (!tokeninflectionMap.contains(token)) { - cacheSentimentLocal.setInflectedCounterNegative(cacheSentimentLocal.getInflectedCounterNegative() + 1); - tokeninflectionMap.add(token); - } - cacheSentimentLocal.addstrTokenForm(token.getForm()); - cacheSentimentLocal.addstrTokenGetEntry(token.getEntry().toString().substring(token.getEntry().toString().length() - 1)); - Collection values = token.getPartMap().values(); - IMWEDesc entry = token.getEntry(); - cacheSentimentLocal.setMarkedContinuousCounter(cacheSentimentLocal.getMarkedContinuousCounter() + entry.getMarkedContinuous()); - cacheSentimentLocal.setUnmarkedPatternCounter(cacheSentimentLocal.getUnmarkedPatternCounter() + entry.getUnmarkedPattern()); - for (IMWEDesc.IPart iPart : values) { - cacheSentimentLocal.addstrTokenGetiPart(iPart.getForm()); - } - for (String strPostPrefix : entry.getPOS().getPrefixes()) { - cacheSentimentLocal.addstrTokenEntryPOS(strPostPrefix); - } - for (int counts : entry.getCounts()) { - cacheSentimentLocal.addEntryCounts(counts); - } - for (IToken tokens : token.getTokens()) { - cacheSentimentLocal.addITokenMapTag(tokens.getTag()); - for (String strtoken : tokens.getStems()) { - cacheSentimentLocal.addstrTokenStems(strtoken); - cacheSentimentLocal.setMarkedContiniousCounterEntries(cacheSentimentLocal.getMarkedContiniousCounterEntries() + 
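
The retrieval helpers above (retrieveTrees, sentimentRNNCorePredicted, sentimentRNNCoreNodevectors, sentimentRNNCoreAnnotations) all walk the same annotated sentence list. For reference, this is the complete path from an Annotation to the per-sentence predicted sentiment class, using the CoreNLP annotation keys this class already relies on; the wrapper class name is an assumption:

import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.neural.rnn.RNNCoreAnnotations;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.sentiment.SentimentCoreAnnotations;
import edu.stanford.nlp.trees.Tree;
import edu.stanford.nlp.util.CoreMap;
import java.util.ArrayList;
import java.util.List;

final class SentimentExtractionSketch {

    // One predicted class per sentence that carries a sentiment-annotated tree.
    static ArrayList<Integer> predictedClasses(Annotation annotated) {
        ArrayList<Integer> classes = new ArrayList<>();
        List<CoreMap> sentences = annotated.get(CoreAnnotations.SentencesAnnotation.class);
        for (CoreMap sentence : sentences) {
            Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
            if (tree != null) {
                classes.add(RNNCoreAnnotations.getPredictedClass(tree));
            }
        }
        return classes;
    }
}
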
1); - } - } - tokenadder += 1; + String partText = sentence.toString(); + if (partText.length() > longest) { + longest = partText.length(); } - cacheSentimentLocal.setAnotatorcounter(cacheSentimentLocal.getAnotatorcounter() + 1); } - cacheSentimentLocal.setTokensCounter(tokenadder); - return cacheSentimentLocal; + return longest; } - private Double entryCountsScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { - ConcurrentMap countsMap = new MapMaker().concurrencyLevel(2).makeMap(); - int totalsize = cacheSentimentLocal1.getEntryCounts().values().size() + cacheSentimentLocal2.getEntryCounts().values().size(); - for (int counts : cacheSentimentLocal1.getEntryCounts().values()) { - for (int counts1 : cacheSentimentLocal2.getEntryCounts().values()) { + private Double entryCountsScoring(Double + score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { + ArrayList countsMap = new ArrayList(); + int totalsize = cacheSentimentLocal1.size() + cacheSentimentLocal2.size(); + for (int counts : cacheSentimentLocal1) { + for (int counts1 : cacheSentimentLocal2) { if (counts > 0 && counts1 > 0) { - if (counts == counts1 && !countsMap.values().contains(counts)) { + if (counts == counts1 && !countsMap.contains(counts)) { score += (counts * 250) / totalsize; - countsMap.put(countsMap.size() + 1, counts); + countsMap.add(counts); } else if (counts * 3 < counts1 || counts1 * 3 < counts) { score -= 600; } @@ -862,17 +1483,18 @@ public class SentimentAnalyzerTest implements Callable { return score; } - private Double tokenEntryPosScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { - if (cacheSentimentLocal1.getstrTokenEntryPOS().values().size() > 1 && cacheSentimentLocal2.getstrTokenEntryPOS().values().size() > 1) { - for (String strTokenPos1 : cacheSentimentLocal1.getstrTokenEntryPOS().values()) { - for (String strTokenPos2 : cacheSentimentLocal2.getstrTokenEntryPOS().values()) { + private Double tokenEntryPosScoring(Double + score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { + if (cacheSentimentLocal1.size() > 1 && cacheSentimentLocal2.size() > 1) { + for (String strTokenPos1 : cacheSentimentLocal1) { + for (String strTokenPos2 : cacheSentimentLocal2) { if (strTokenPos1.equals(strTokenPos2)) { score += 500; } } } - int posEntrySize1 = cacheSentimentLocal1.getstrTokenEntryPOS().values().size(); - int posEntrySize2 = cacheSentimentLocal2.getstrTokenEntryPOS().values().size(); + int posEntrySize1 = cacheSentimentLocal1.size(); + int posEntrySize2 = cacheSentimentLocal2.size(); if (posEntrySize1 * 3 > posEntrySize2 && posEntrySize2 * 3 > posEntrySize1) { score += posEntrySize1 > posEntrySize2 ? 
(posEntrySize1 - posEntrySize2) * 700 : (posEntrySize2 - posEntrySize1) * 700; } @@ -880,9 +1502,8 @@ public class SentimentAnalyzerTest implements Callable { return score; } - private Double unmarkedPatternCounterScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { - int UnmarkedPatternCounter1 = cacheSentimentLocal1.getUnmarkedPatternCounter(); - int UnmarkedPatternCounter2 = cacheSentimentLocal2.getUnmarkedPatternCounter(); + private Double unmarkedPatternCounterScoring(Double score, int UnmarkedPatternCounter1, + int UnmarkedPatternCounter2) { if (UnmarkedPatternCounter1 > 0 && UnmarkedPatternCounter2 > 0) { if (UnmarkedPatternCounter1 < 100 && UnmarkedPatternCounter2 < 100) { if (UnmarkedPatternCounter1 * 2 > UnmarkedPatternCounter2 && UnmarkedPatternCounter2 * 2 > UnmarkedPatternCounter1) { @@ -897,11 +1518,10 @@ public class SentimentAnalyzerTest implements Callable { return score; } - private Double markedContiniousCounterScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { - int MarkedContinuousCounter1 = cacheSentimentLocal1.getMarkedContinuousCounter(); - int MarkedContinuousCounter2 = cacheSentimentLocal2.getMarkedContinuousCounter(); - int MarkedContiniousCounter1Entries = cacheSentimentLocal1.getMarkedContiniousCounterEntries(); - int MarkedContiniousCounter2Entries = cacheSentimentLocal2.getMarkedContiniousCounterEntries(); + private Double markedContiniousCounterScoring(Double score, int MarkedContinuousCounter1, + int MarkedContinuousCounter2) { + int MarkedContiniousCounter1Entries = MarkedContinuousCounter1; + int MarkedContiniousCounter2Entries = MarkedContinuousCounter2; if (MarkedContinuousCounter1 > 0 && MarkedContinuousCounter2 > 0) { if (MarkedContinuousCounter1 > MarkedContinuousCounter2 * 50 || MarkedContinuousCounter2 > MarkedContinuousCounter1 * 50) { score -= MarkedContinuousCounter1 > MarkedContinuousCounter2 ? 
MarkedContinuousCounter1 * 120 : MarkedContinuousCounter2 * 120; @@ -929,19 +1549,20 @@ public class SentimentAnalyzerTest implements Callable { return score; } - private Double strTokensMapScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { - ConcurrentMap strtokensMap = new MapMaker().concurrencyLevel(2).makeMap(); - for (String strTokeniPart1 : cacheSentimentLocal1.getstrTokenGetiPart().values()) { - for (String strTokeniPart2 : cacheSentimentLocal2.getstrTokenGetiPart().values()) { - if (strTokeniPart1.equals(strTokeniPart2) && !strtokensMap.values().contains(strTokeniPart2)) { - strtokensMap.put(strtokensMap.size() + 1, strTokeniPart2); + private Double strTokensMapScoring(Double + score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { + ArrayList strtokensMap = new ArrayList(); + for (String strTokeniPart1 : cacheSentimentLocal1) { + for (String strTokeniPart2 : cacheSentimentLocal2) { + if (strTokeniPart1.equals(strTokeniPart2) && !strtokensMap.contains(strTokeniPart2)) { + strtokensMap.add(strTokeniPart2); score += 800; } } } - int tokenIPartSize1 = cacheSentimentLocal1.getstrTokenGetiPart().values().size(); - int tokenIPartSize2 = cacheSentimentLocal2.getstrTokenGetiPart().values().size(); - int strTokenMapSize = strtokensMap.values().size(); + int tokenIPartSize1 = cacheSentimentLocal1.size(); + int tokenIPartSize2 = cacheSentimentLocal2.size(); + int strTokenMapSize = strtokensMap.size(); if (tokenIPartSize1 * 2 > tokenIPartSize2 && tokenIPartSize2 * 2 > tokenIPartSize1) { score += tokenIPartSize1 > tokenIPartSize2 ? (tokenIPartSize1 - tokenIPartSize2) * 700 : (tokenIPartSize2 - tokenIPartSize1) * 700; score += strTokenMapSize * 600; @@ -951,46 +1572,67 @@ public class SentimentAnalyzerTest implements Callable { return score; } - private Double strTokenEntryScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { - int tokenEntry1 = cacheSentimentLocal1.getstrTokenGetEntry().values().size(); - int tokenEntry2 = cacheSentimentLocal2.getstrTokenGetEntry().values().size(); + private Double strTokenEntryScoring(Double score, ArrayList cacheSentimentLocal1, + ArrayList cacheSentimentLocal2) { + int tokenEntry1 = cacheSentimentLocal1.size(); + int tokenEntry2 = cacheSentimentLocal2.size(); boolean boundaryLeaks = false; int remnantCounter = 0; if (tokenEntry1 * 2 != tokenEntry2 && tokenEntry2 * 2 != tokenEntry1) { boundaryLeaks = true; } - ConcurrentMap entryTokenMap = new MapMaker().concurrencyLevel(2).makeMap(); - for (String strTokenEntry1 : cacheSentimentLocal1.getstrTokenGetEntry().values()) { - for (String strTokenEntry2 : cacheSentimentLocal2.getstrTokenGetEntry().values()) { - if (!entryTokenMap.values().contains(strTokenEntry2)) { + ArrayList entryTokenMap = new ArrayList(); + for (String strTokenEntry1 : cacheSentimentLocal1) { + for (String strTokenEntry2 : cacheSentimentLocal2) { + if (!entryTokenMap.contains(strTokenEntry2)) { if (strTokenEntry1.equals(strTokenEntry2)) { score += boundaryLeaks ? 2500 : 2500 / 2; + //logger.info("score post score += boundaryLeaks ? 
2500 : 2500 / 2; : " + score); } else if (!boundaryLeaks) { score -= 450; + //logger.info("score post score -= 450;: " + score); } else { remnantCounter++; } } - entryTokenMap.put(entryTokenMap.size() + 1, strTokenEntry2); + entryTokenMap.add(strTokenEntry2); } } + //logger.info("tokenEntry2: " + tokenEntry2); + //logger.info("tokenEntry1: " + tokenEntry1); + //logger.info("remnantCounter: " + remnantCounter); + //logger.info("boundaryLeaks: " + boundaryLeaks); + if (tokenEntry1 == 1) { + score -= 9453; + //logger.info("score post score -= 9453;: " + score); + } + if (remnantCounter == 1) { + score -= 4083; + //logger.info("score post score -= 4083;: " + score); + } + if (tokenEntry2 == 2) { + score += 4563; + //logger.info("score score += 4563; " + score); + } score += remnantCounter * 250; + //logger.info("score post score += remnantCounter * 250;: " + score); return score; } - private Double strTokenMapTagsScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { - ConcurrentMap iTokenMapTagsMap = new MapMaker().concurrencyLevel(2).makeMap(); - for (String strmapTag : cacheSentimentLocal1.getITokenMapTag().values()) { - for (String strmapTag1 : cacheSentimentLocal2.getITokenMapTag().values()) { - if (strmapTag.equals(strmapTag1) && !iTokenMapTagsMap.values().contains(strmapTag1)) { + private Double strTokenMapTagsScoring(Double + score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { + ArrayList iTokenMapTagsMap = new ArrayList(); + for (String strmapTag : cacheSentimentLocal1) { + for (String strmapTag1 : cacheSentimentLocal2) { + if (strmapTag.equals(strmapTag1) && !iTokenMapTagsMap.contains(strmapTag1)) { score -= 1450; - iTokenMapTagsMap.put(iTokenMapTagsMap.size() + 1, strmapTag); + iTokenMapTagsMap.add(strmapTag); } } } - int mapTagsize1 = cacheSentimentLocal1.getITokenMapTag().values().size(); - int mapTagsize2 = cacheSentimentLocal2.getITokenMapTag().values().size(); - int tokenTagMapSize = iTokenMapTagsMap.values().size(); + int mapTagsize1 = cacheSentimentLocal1.size(); + int mapTagsize2 = cacheSentimentLocal2.size(); + int tokenTagMapSize = iTokenMapTagsMap.size(); if (mapTagsize1 != 0 && mapTagsize2 != 0) { if (mapTagsize1 * 2 > mapTagsize2 && mapTagsize2 * 2 > mapTagsize1) { score += mapTagsize1 > mapTagsize2 ? 
(mapTagsize1 - mapTagsize2) * 700 : (mapTagsize2 - mapTagsize1) * 700; @@ -1002,13 +1644,14 @@ public class SentimentAnalyzerTest implements Callable { return score; } - private Double tokenformSizeScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { - int tokenform1size = cacheSentimentLocal1.getstrTokenForm().values().size(); - int tokenform2size = cacheSentimentLocal2.getstrTokenForm().values().size(); + private Double tokenformSizeScoring(Double + score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { + int tokenform1size = cacheSentimentLocal1.size(); + int tokenform2size = cacheSentimentLocal2.size(); if (tokenform1size > 0 || tokenform2size > 0) { if (tokenform1size < tokenform2size * 5 && tokenform2size < tokenform1size * 5) { - for (String strTokenForm1itr1 : cacheSentimentLocal1.getstrTokenForm().values()) { - for (String strTokenForm1itr2 : cacheSentimentLocal2.getstrTokenForm().values()) { + for (String strTokenForm1itr1 : cacheSentimentLocal1) { + for (String strTokenForm1itr2 : cacheSentimentLocal2) { if (strTokenForm1itr1.equals(strTokenForm1itr2)) { score -= 1600; } else { @@ -1027,25 +1670,26 @@ public class SentimentAnalyzerTest implements Callable { return score; } - private Double tokenStemmingMapScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { - ConcurrentMap tokenStemmingMap = new MapMaker().concurrencyLevel(2).makeMap(); - for (String strTokenStem : cacheSentimentLocal1.getstrTokenStems().values()) { - for (String strTokenStem1 : cacheSentimentLocal2.getstrTokenStems().values()) { - if (strTokenStem.equals(strTokenStem1) && !tokenStemmingMap.values().contains(strTokenStem)) { + private Double tokenStemmingMapScoring(Double + score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { + ArrayList tokenStemmingMap = new ArrayList(); + for (String strTokenStem : cacheSentimentLocal1) { + for (String strTokenStem1 : cacheSentimentLocal2) { + if (strTokenStem.equals(strTokenStem1) && !tokenStemmingMap.contains(strTokenStem)) { score += 500; - tokenStemmingMap.put(tokenStemmingMap.size() + 1, strTokenStem); + tokenStemmingMap.add(strTokenStem); } } } return score; } - private Double inflectedCounterScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { - int inflectedCounterPositive1 = cacheSentimentLocal1.getInflectedCounterPositive(); - int inflectedCounterPositive2 = cacheSentimentLocal2.getInflectedCounterPositive(); - int inflectedCounterNegative = cacheSentimentLocal1.getInflectedCounterNegative() > cacheSentimentLocal2.getInflectedCounterNegative() - ? cacheSentimentLocal1.getInflectedCounterNegative() - cacheSentimentLocal2.getInflectedCounterNegative() - : cacheSentimentLocal2.getInflectedCounterNegative() - cacheSentimentLocal1.getInflectedCounterNegative(); + private Double inflectedCounterScoring(Double score, int inflectedCounterPositive1, + int inflectedCounterPositive2, + int inflectedCounterNegative1, int inflectedCounterNegative2) { + int inflectedCounterNegative = inflectedCounterNegative1 > inflectedCounterNegative2 + ? 
inflectedCounterNegative1 - inflectedCounterNegative2 + : inflectedCounterNegative2 - inflectedCounterNegative1; if ((inflectedCounterPositive1 + inflectedCounterPositive2) > inflectedCounterNegative && inflectedCounterNegative > 0) { score += ((inflectedCounterPositive1 + inflectedCounterPositive2) - inflectedCounterNegative) * 650; } @@ -1064,9 +1708,7 @@ public class SentimentAnalyzerTest implements Callable { return score; } - private Double annotatorCountScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { - int anotatorcounter1 = cacheSentimentLocal1.getAnotatorcounter(); - int anotatorcounter2 = cacheSentimentLocal2.getAnotatorcounter(); + private Double annotatorCountScoring(Double score, int anotatorcounter1, int anotatorcounter2) { if (anotatorcounter1 > 1 && anotatorcounter2 > 1) { if (anotatorcounter1 * 2 > anotatorcounter2 && anotatorcounter2 * 2 > anotatorcounter1) { score += anotatorcounter1 > anotatorcounter2 ? (anotatorcounter1 - anotatorcounter2) * 700 @@ -1078,67 +1720,58 @@ public class SentimentAnalyzerTest implements Callable { return score; } - private Double tokensCounterScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { - int tokensCounter1 = cacheSentimentLocal1.getTokensCounter(); - int tokensCounter2 = cacheSentimentLocal2.getTokensCounter(); + private Double tokensCounterScoring(Double score, int tokensCounter1, int tokensCounter2) { if ((tokensCounter1 > 1 && tokensCounter2 > 1) && tokensCounter1 < tokensCounter2 * 5 && tokensCounter2 < tokensCounter1 * 5) { if (tokensCounter1 > tokensCounter2 / 2 && tokensCounter2 > tokensCounter1 / 2 && tokensCounter1 < 10 && tokensCounter2 < 10) { score += (tokensCounter1 + tokensCounter2) * 500; + //logger.info("score post score += (tokensCounter1 + tokensCounter2) * 500;" + score); } else { - score -= 500; + score -= 503; + //logger.info("Score post score -= 503; :" + score); } } else { int elseint = tokensCounter1 >= tokensCounter2 ? (tokensCounter1 - tokensCounter2) * 500 : (tokensCounter2 - tokensCounter1) * 500; if ((tokensCounter1 > tokensCounter2 * 5 || tokensCounter2 > tokensCounter1 * 5) && tokensCounter1 > 0 && tokensCounter2 > 0) { score -= tokensCounter1 > tokensCounter2 ? (tokensCounter1 - tokensCounter2) * 500 : (tokensCounter2 - tokensCounter1) * 500; + //logger.info("Score post score -= tokensCounter1 > tokensCounter2 ? 
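
entryCountsScoring, strTokensMapScoring, strTokenMapTagsScoring and tokenStemmingMapScoring above are all instances of one pattern: compare two lists pairwise and score each shared value exactly once, tracking matches in a "seen" list. The pattern stated once, generically; a sketch for reading the hunks, not a drop-in replacement, and all names are illustrative:

import java.util.ArrayList;

final class OverlapScoringSketch {

    // Each value present in both lists contributes the bonus a single time,
    // no matter how many pairings repeat it.
    static <T> double scoreOverlap(ArrayList<T> left, ArrayList<T> right, double bonus) {
        ArrayList<T> seen = new ArrayList<>();
        double score = 0.0;
        for (T a : left) {
            for (T b : right) {
                if (a.equals(b) && !seen.contains(b)) {
                    seen.add(b);
                    score += bonus;
                }
            }
        }
        return score;
    }
}
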
(tokensCounter1 - tokensCounter2) * 500 : (tokensCounter2 - tokensCounter1) * 500; :" + score); } else if (elseint > 0 && tokensCounter1 > 0 && tokensCounter2 > 0) { if (elseint > 500 && elseint < 2000) { score += elseint * 10; + //logger.info("Score post score += elseint * 10; :" + score); } else { score -= elseint * 15; + //logger.info("Score post score -= elseint * 15; :" + score); } } else { + //logger.info("tokensCounter1: " + tokensCounter1); + //logger.info("tokensCounter2 " + tokensCounter2); + //logger.info("elseint: " + elseint); + if (tokensCounter1 == 0 && tokensCounter2 == 1 && elseint == 500) { + score += 3012; + ////logger.info("score post : score += 3012;" + score); + } if (elseint > 0 && elseint < 1500 && (tokensCounter1 > 0 || tokensCounter2 > 0) && tokensCounter1 <= 2 && tokensCounter2 <= 2) { - score -= elseint * 1.5; + score += elseint * 1.5; + //logger.info("Score post score += elseint * 1.5; :" + score); + } else if (elseint == 0 && tokensCounter1 == 0 && tokensCounter2 == 0) { + score += 3064; + //logger.info("score post score += 3064; : " + score); } else if (tokensCounter1 < 5 && tokensCounter2 < 5) { score -= elseint * (tokensCounter1 + tokensCounter2); + //logger.info("Score post score -= elseint * (tokensCounter1 + tokensCounter2); :" + score); } } } return score; } - private SentimentValueCache setupNEREntitiesAndTokenTags(CoreDocument pipelineCoreDcoument, SentimentValueCache cacheSentimentLocal) { - for (CoreEntityMention em : pipelineCoreDcoument.entityMentions()) { - Set> entrySet = em.entityTypeConfidences().entrySet(); - String entityType = em.entityType(); - Double EntityConfidences = 0.0; - for (Map.Entry entries : entrySet) { - EntityConfidences = entries.getValue(); - } - List tokens = em.tokens(); - for (CoreLabel token : tokens) { - if (token != null) { - if (!cacheSentimentLocal.getnerEntityTokenTags().values().contains(token.tag())) { - if (entityType.equals("PERSON") && EntityConfidences > 0.80) { - cacheSentimentLocal.addnerEntityTokenTags(token.tag()); - } - } - } - } - if (!cacheSentimentLocal.getnerEntities1().values().contains(em.text())) { - cacheSentimentLocal.addNEREntities1(em.text()); - cacheSentimentLocal.addNEREntities2(em.entityType()); - } - } - return cacheSentimentLocal; - } - - private Double nerEntitiesAndTokenScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { + private Double nerEntitiesAndTokenScoring(Double score, ArrayList entityTokenTags1, + ArrayList entityTokenTags2, ArrayList nerEntities1, + ArrayList nerEntities2) { List entsCollection = new ArrayList(); - for (String strEnts1 : cacheSentimentLocal1.getnerEntities1().values()) { - for (String strEnts2 : cacheSentimentLocal2.getnerEntities2().values()) { + for (String strEnts1 : nerEntities1) { + for (String strEnts2 : nerEntities2) { if (strEnts1.equalsIgnoreCase(strEnts2) && !entsCollection.contains(strEnts1)) { score += 2500; entsCollection.add(strEnts1); @@ -1146,8 +1779,8 @@ public class SentimentAnalyzerTest implements Callable { } } entsCollection = new ArrayList(); - for (String strToken : cacheSentimentLocal1.getnerEntityTokenTags().values()) { - for (String strToken1 : cacheSentimentLocal2.getnerEntityTokenTags().values()) { + for (String strToken : entityTokenTags1) { + for (String strToken1 : entityTokenTags2) { if (strToken.equalsIgnoreCase(strToken1) && !entsCollection.contains(strToken)) { score += 2000; entsCollection.add(strToken); @@ -1157,43 +1790,50 @@ public class SentimentAnalyzerTest 
implements Callable { return score; } - private SentimentValueCache setupStoWordTokensLemma(Annotation pipelineAnnotationSentiment, SentimentValueCache cacheSentimentLocal) { - String customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for,if,in,into,is,it,no,not,of,on,or,such,that,the,their,then,there,these,they,this,to,was,will,with"; - List tokensSentiment = pipelineAnnotationSentiment.get(CoreAnnotations.TokensAnnotation.class); - Set stopWords = StopAnalyzer.ENGLISH_STOP_WORDS_SET; - Set stopWordsCustom = StopwordAnnotator.getStopWordList(customStopWordList, true); - for (CoreLabel token : tokensSentiment) { - Pair stopword = token.get(StopwordAnnotator.class); - String word = token.word().toLowerCase(); - if (stopWords.contains(word) || stopWordsCustom.contains(word)) { - cacheSentimentLocal.addstopwordTokens(word); - } - String lemma = token.lemma().toLowerCase(); - if (stopWords.contains(lemma) || stopWordsCustom.contains(lemma)) { - cacheSentimentLocal.addStopWordLemma(lemma); - } - if (stopword.first() && stopword.second()) { - cacheSentimentLocal.setPairCounter(cacheSentimentLocal.getPairCounter() + 1); - } - } - return cacheSentimentLocal; - } - - private Double stopWordTokenLemmaScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { + private Double stopWordTokenLemmaScoring(Double score, ArrayList stopWordToken1, + ArrayList stopWordToken2, ArrayList stopWordLemma1, + ArrayList stopWordLemma2) { Collection stopWordCollection = new ArrayList(); - for (String stopwords1 : cacheSentimentLocal1.getStopwordTokens().values()) { - for (String stopwords2 : cacheSentimentLocal2.getStopwordTokens().values()) { + //logger.info("stopWordToken1 size: " + stopWordToken1.size()); + //logger.info("stopWordToken2 size: " + stopWordToken2.size()); + //logger.info("stopWordLemma1 size: " + stopWordLemma1.size()); + //logger.info("stopWordLemma2 size: " + stopWordLemma2.size()); + if (stopWordLemma1.size() == 3 && stopWordLemma2.size() == 3 && stopWordToken1.size() == 2 + && stopWordToken2.size() == 2) { + score += 7392; + //logger.info("score post score += 7392;: " + score); + } + for (String stopwords1 : stopWordToken1) { + for (String stopwords2 : stopWordToken2) { if (stopwords1.equals(stopwords2) && !stopWordCollection.contains(stopwords1)) { - score -= 50; - stopWordCollection.add(stopwords1); + if (stopWordToken1.size() == stopWordToken2.size() && stopWordLemma1.size() == stopWordToken2.size() && + stopWordLemma1.size() == 1 && stopWordToken1.size() == 1) { + score += 4103; + //logger.info("score post score += 4103; : " + score); + } else if (stopWordLemma2.size() == 3 && stopWordLemma1.size() == 3 && stopWordToken1.size() == 3) { + score += 545; + //logger.info("score post score += 2345;: " + score); + } else if (stopWordLemma1.size() == 1 && stopWordToken1.size() == 0) { + score -= 8530; + //logger.info("score postscore -= 8530;: " + score); + stopWordCollection.add(stopwords1); + } else if (stopWordLemma1.size() == 4 || stopWordToken1.size() == 4 || stopWordLemma2.size() == 4 + || stopWordToken2.size() == 4) { + score -= 8654; + //logger.info("score post score -= 8654; :" + score); + } else if (stopWordLemma1.size() == 2 && stopWordToken1.size() == 2) { + score -= 1479; + //logger.info("score post score += 1479; : " + score); + } } } } stopWordCollection = new ArrayList(); - for (String stopwords1 : cacheSentimentLocal1.getStopWordLemma().values()) { - for (String stopwords2 : 
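
For context, the deleted setupStoWordTokensLemma shows where these stop-word lists come from: lowercase surface forms and lemmas matched against Lucene's StopAnalyzer.ENGLISH_STOP_WORDS_SET plus a custom comma-separated list. A trimmed, self-contained version; the Lucene import path varies by version, the project's StopwordAnnotator is omitted, and the class name is assumed:

import edu.stanford.nlp.ling.CoreLabel;
import org.apache.lucene.analysis.core.StopAnalyzer;
import java.util.ArrayList;
import java.util.List;

final class StopWordCollectorSketch {

    // Collects every token whose lowercase surface form is an English stop word.
    static ArrayList<String> stopWordTokens(List<CoreLabel> tokens) {
        ArrayList<String> hits = new ArrayList<>();
        for (CoreLabel token : tokens) {
            String word = token.word().toLowerCase();
            if (StopAnalyzer.ENGLISH_STOP_WORDS_SET.contains(word)) {
                hits.add(word);
            }
        }
        return hits;
    }
}
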
cacheSentimentLocal2.getStopWordLemma().values()) { + for (String stopwords1 : stopWordLemma1) { + for (String stopwords2 : stopWordLemma2) { if (stopwords1.equals(stopwords2) && !stopWordCollection.contains(stopwords1)) { - score -= 50; + score -= 51; + //logger.info("score post score -= 51; " + score); stopWordCollection.add(stopwords1); } } @@ -1201,307 +1841,616 @@ public class SentimentAnalyzerTest implements Callable { return score; } - private Double stopwordTokenPairCounterScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { - if (!cacheSentimentLocal1.getStopwordTokens().values().isEmpty() && !cacheSentimentLocal2.getStopwordTokens().values().isEmpty()) { - int stopwordsize1 = cacheSentimentLocal1.getStopwordTokens().values().size(); - int stopwordsize2 = cacheSentimentLocal2.getStopwordTokens().values().size(); + private Double stopwordTokenPairCounterScoring(Double score, ArrayList stopWordToken1, + ArrayList stopWordToken2, int pairCounter1, + int pairCounter2) { + if (!stopWordToken1.isEmpty() && !stopWordToken2.isEmpty()) { + int stopwordsize1 = stopWordToken1.size(); + int stopwordsize2 = stopWordToken2.size(); + //logger.info("stopwordsize1: " + stopwordsize1); + //logger.info("stopwordsize2: " + stopwordsize2); if (stopwordsize1 * 5 < stopwordsize2 || stopwordsize2 * 5 < stopwordsize1) { score -= stopwordsize1 > stopwordsize2 ? (stopwordsize1 - stopwordsize2) * 850 : (stopwordsize2 - stopwordsize1) * 850; - } else if (stopwordsize1 == stopwordsize2 && stopwordsize1 > 1) { + //logger.info("score post score -= stopwordsize1 > stopwordsize2 ? (stopwordsize1 - stopwordsize2) * 850 : (stopwordsize2 - stopwordsize1) * 850;: " + score); + } else if (stopwordsize1 == 2 && stopwordsize2 == 2) { + score -= 7312; + //logger.info("score post score -= 7312;: " + score); + } else if (stopwordsize1 == stopwordsize2 && stopwordsize1 > 5) { score -= stopwordsize1 * 450; + //logger.info("score post score -= stopwordsize1 * 450;: " + score); } else if ((stopwordsize1 / 2 == stopwordsize2 || stopwordsize2 / 2 == stopwordsize1) && stopwordsize1 + stopwordsize2 >= 4) { score -= 2500; - } else { - score += stopwordsize1 > stopwordsize2 ? (stopwordsize1 - stopwordsize2) * 850 : (stopwordsize2 - stopwordsize1) * 850; + //logger.info("score post score -= 2500;: " + score); + } else if (stopwordsize1 == 3 && stopwordsize2 == 3) { + score -= 513; + //logger.info("score post score -= 513;: " + score); } } - int pairCounter1 = cacheSentimentLocal1.getPairCounter(); - int pairCounter2 = cacheSentimentLocal2.getPairCounter(); if (pairCounter1 > 0 && pairCounter2 > 0) { + //logger.info("pairCounter1: " + pairCounter1); + //logger.info("pairCounter2: " + pairCounter2); + if (pairCounter1 == 1 && pairCounter2 == 1) { + score -= 2554; + //logger.info("score post score -= 2554;: " + score); + } if (pairCounter1 * 5 <= pairCounter2 || pairCounter2 * 5 <= pairCounter1) { score -= pairCounter1 > pairCounter2 ? (pairCounter1 - pairCounter2) * 1500 : (pairCounter2 - pairCounter1) * 1500; - } else if (pairCounter1 == pairCounter2 && pairCounter1 > 1) { + //logger.info("score post score -= pairCounter1 > pairCounter2 ? 
(pairCounter1 - pairCounter2) * 1500 : (pairCounter2 - pairCounter1) * 1500;: " + score); + } else if (pairCounter1 == pairCounter2 && pairCounter1 > 5) { score -= pairCounter1 * 450; + //logger.info("score post score -= pairCounter1 * 450; : " + score); } else if ((pairCounter1 / 2 == pairCounter2 || pairCounter2 / 2 == pairCounter1) && pairCounter1 + pairCounter2 >= 4) { score -= 2500; - } else { - score += pairCounter1 > pairCounter2 ? (pairCounter1 - pairCounter2) * 700 : (pairCounter2 - pairCounter1) * 700; + //logger.info("score post score -= 2500;: " + score); + } else if (pairCounter1 == 3 && pairCounter2 == 3) { + score += 512; + //logger.info("score post score += 512; " + score); } } return score; } - private Double tgwListScoreIncrementer(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) { - AtomicInteger runCount = new AtomicInteger(0); - cacheSentimentLocal1.getTgwlistIndex().values().forEach(TaggedWord -> { - if (!cacheSentimentLocal2.getTgwlistIndex().values().contains(TaggedWord)) { - cacheSentimentLocal2.addTgwlistIndex(TaggedWord); - runCount.getAndIncrement(); + private Double tgwListScoreIncrementer(Double score, ArrayList tgwListIndex1, + ArrayList tgwListIndex2) { + int runCount = 0; + for (String taggedWord : tgwListIndex1) { + boolean found = false; + for (String taggedWord1 : tgwListIndex2) { + if (taggedWord.equals(taggedWord1)) { + found = true; + break; + } } - }); - score += runCount.get() * 64; + if (!found) { + runCount++; + } + } + score += runCount * 64; return score; } - @Override - public final SimilarityMatrix call() { + private List> getIMWES(List coreMaps) { + List> tokenList = new ArrayList<>(); + for (CoreMap sentence : coreMaps) { + List> imwes = sentence.get(JMWEAnnotation.class); + tokenList.addAll(imwes); + } + return tokenList; + } + + private int getInflictedCounterPositive(List> imwesFLocal) { + int InflectedCounterPositive = 0; + for (IMWE token : imwesFLocal) { + if (token.isInflected()) { + InflectedCounterPositive++; + } + } + return InflectedCounterPositive; + } + + private int getUnmarkedPatterns(List> imwesFLocal) { + int unmarked = 0; + for (IMWE token : imwesFLocal) { + IMWEDesc entry = token.getEntry(); + unmarked += entry.getUnmarkedPattern(); + } + return unmarked; + } + + private ArrayList gettokenForms(List> imwesFLocal) { + ArrayList arrs = new ArrayList<>(); + for (IMWE token : imwesFLocal) { + String form = token.getForm(); + arrs.add(form); + } + return arrs; + } + + private ArrayList getStrtokenEntryPos(List> imwesFLocal) { + ArrayList arrs = new ArrayList<>(); + for (IMWE token : imwesFLocal) { + IMWEDesc entry = token.getEntry(); + for (String strPostPrefix : entry.getPOS().getPrefixes()) { + arrs.add(strPostPrefix); + } + } + return arrs; + } + + private ArrayList getintTokenEntyCounts(List> imwesFLocal) { + ArrayList arrs = new ArrayList<>(); + for (IMWE token : imwesFLocal) { + IMWEDesc entry = token.getEntry(); + for (int counts : entry.getCounts()) { + arrs.add(counts); + } + } + return arrs; + } + + private ArrayList getITokenTags(List> imwesFLocal) { + ArrayList arrs = new ArrayList<>(); + for (IMWE token : imwesFLocal) { + for (IToken tokens : token.getTokens()) { + arrs.add(tokens.getTag()); + } + } + return arrs; + } + + private ArrayList getstrTokenStems(List> imwesFLocal) { + ArrayList arrs = new ArrayList<>(); + for (IMWE token : imwesFLocal) { + for (IToken tokens : token.getTokens()) { + for (String strtoken : tokens.getStems()) { + arrs.add(strtoken); + } + } 
+ } + return arrs; + } + + private Integer getAnotatorcounter(List> imwesFLocal) { + return imwesFLocal.size(); + } + + private ArrayList getnerEntities(CoreDocument coreDocument) { + if (coreDocument == null || coreDocument.entityMentions() == null) { + return new ArrayList(); + } + ArrayList arrs = new ArrayList<>(); + for (CoreEntityMention em : coreDocument.entityMentions()) { + if (!arrs.contains(em.text())) { + arrs.add(em.text()); + } + } + return arrs; + } + + private ArrayList getnerEntitiesType(CoreDocument coreDocument) { + if (coreDocument == null || coreDocument.entityMentions() == null) { + return new ArrayList(); + } + ArrayList arrs = new ArrayList<>(); + for (CoreEntityMention em : coreDocument.entityMentions()) { + if (!arrs.contains(em.entityType())) { + arrs.add(em.entityType()); + } + } + return arrs; + } + + private Integer getPairCounter(Annotation pipelineAnnotationSentiment) { + int counter = 0; + List tokensSentiment = pipelineAnnotationSentiment. + get(CoreAnnotations.TokensAnnotation.class); + for (CoreLabel token : tokensSentiment) { + Pair stopword = token.get(StopwordAnnotator.class); + if (stopword.first() && stopword.second()) { + counter++; + } + } + return counter; + } + + private ArrayList getstopWordLemma(Annotation pipelineAnnotationSentiment) { + ArrayList arrs = new ArrayList<>(); + List tokensSentiment = pipelineAnnotationSentiment. + get(CoreAnnotations.TokensAnnotation.class); + String customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for," + + "if,in,into,is,it,no,not,of,on,or,such,that,the,their,then,there,these," + + "they,this,to,was,will,with"; + for (CoreLabel token : tokensSentiment) { + Set stopWords = StopAnalyzer.ENGLISH_STOP_WORDS_SET; + Set stopWordsCustom = StopwordAnnotator.getStopWordList(customStopWordList, true); + String lemma = token.lemma().toLowerCase(); + if (stopWords.contains(lemma) || stopWordsCustom.contains(lemma)) { + arrs.add(lemma); + } + } + return arrs; + } + + private ArrayList getstopWordToken(Annotation pipelineAnnotationSentiment) { + ArrayList arrs = new ArrayList<>(); + List tokensSentiment = pipelineAnnotationSentiment. 
+                get(CoreAnnotations.TokensAnnotation.class);
+        String customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for," +
+                "if,in,into,is,it,no,not,of,on,or,such,that,the,their,then,there,these," +
+                "they,this,to,was,will,with";
+        for (CoreLabel token : tokensSentiment) {
+            String word = token.word().toLowerCase();
+            Set stopWords = StopAnalyzer.ENGLISH_STOP_WORDS_SET;
+            Set stopWordsCustom = StopwordAnnotator.getStopWordList(customStopWordList, true);
+            if (stopWords.contains(word) || stopWordsCustom.contains(word)) {
+                arrs.add(word);
+            }
+        }
+        return arrs;
+    }
+
+    private ArrayList getentityTokenTags(CoreDocument coreDocument) {
+        if (coreDocument == null || coreDocument.entityMentions() == null) {
+            return new ArrayList();
+        }
+        ArrayList arrs = new ArrayList<>();
+        for (CoreEntityMention em : coreDocument.entityMentions()) {
+            List tokens = em.tokens();
+            String entityType = em.entityType();
+            Double EntityConfidences = 0.0;
+            Set> entrySet = em.entityTypeConfidences().entrySet();
+            for (Map.Entry entries : entrySet) {
+                if (EntityConfidences < entries.getValue()) {
+                    EntityConfidences = entries.getValue();
+                }
+            }
+            for (CoreLabel token : tokens) {
+                if (token != null) {
+                    if (!arrs.contains(token.tag())) {
+                        if (entityType.equals("PERSON") && EntityConfidences > 0.80) {
+                            arrs.add(token.tag());
+                        }
+                    }
+                }
+            }
+        }
+        return arrs;
+    }
+
+    private ArrayList getstrTokensIpartForm(List> imwesFLocal) {
+        ArrayList arrs = new ArrayList<>();
+        for (IMWE token : imwesFLocal) {
+            Collection values = token.getPartMap().values();
+            for (IMWEDesc.IPart iPart : values) {
+                String iPartForm = iPart.getForm();
+                arrs.add(iPartForm);
+            }
+        }
+        return arrs;
+    }
+
+    private int getMarkedCounter(List> imwesFLocal) {
+        int marked = 0;
+        for (IMWE token : imwesFLocal) {
+            IMWEDesc entry = token.getEntry();
+            marked += entry.getMarkedContinuous();
+            for (IToken tokens : token.getTokens()) {
+                marked += tokens.getStems().size();
+            }
+        }
+        return marked;
+    }
+
+    public int getInflictedCounterNegative(List> imwesFLocal) {
+        int InflectedCounterNegative = 0;
+        Collection> tokeninflectionMap = new ArrayList();
+        for (IMWE token : imwesFLocal) {
+            if (!token.isInflected() && !tokeninflectionMap.contains(token)) {
+                InflectedCounterNegative++;
+                tokeninflectionMap.add(token);
+            }
+        }
+        return InflectedCounterNegative;
+    }
+
+    public ArrayList getTokenEntries(List> imwesFLocal) {
+        ArrayList tokenStrList = new ArrayList<>();
+        for (IMWE token : imwesFLocal) {
+            final String substring = token.getEntry().toString().substring(token.getEntry()
+                    .toString().length() - 1);
+            tokenStrList.add(substring);
+        }
+        return tokenStrList;
+    }
+
+
+    public void validateStringCaches() {
+        Class sentimentAnnotatedTreeClass =
+                SentimentCoreAnnotations.SentimentAnnotatedTree.class;
+
+        if (this.tokenizeCountingF == null) {
+            this.tokenizeCountingF = tokenizeCounting(getTaggedWordList(str1));
+        }
+        if (this.tokenizeCounting == null) {
+            this.tokenizeCounting = tokenizeCounting(getTaggedWordList(str));
+        }
+        if (this.taggedWordListF == null) {
+            this.taggedWordListF = getTaggedWordList(str);
+        }
+        if (this.taggedWordList1 == null) {
+            this.taggedWordList1 = getTaggedWordList(str1);
+        }
+        if (this.retrieveTGWListF == null) {
+            this.retrieveTGWListF = retrieveTGWListIndex(this.taggedWordListF);
+        }
+        if (this.retrieveTGWList1 == null) {
+            this.retrieveTGWList1 = retrieveTGWListIndex(this.taggedWordList1);
+        }
+        if (this.sentencesF == null) {
+            this.sentencesF =
pipelineAnnotation1.get(CoreAnnotations.SentencesAnnotation.class); + } + if (this.sentences1 == null) { + this.sentences1 = pipelineAnnotation2.get(CoreAnnotations.SentencesAnnotation.class); + } + if (this.sentencesSentimentF == null) { + this.sentencesSentimentF = pipelineAnnotation1Sentiment.get(CoreAnnotations.SentencesAnnotation.class); + } + if (this.sentencesSentiment1 == null) { + this.sentencesSentiment1 = pipelineAnnotation2Sentiment.get(CoreAnnotations.SentencesAnnotation.class); + } + if (this.treesF == null) { + this.treesF = retrieveTrees(this.sentencesF); + } + if (this.trees1 == null) { + this.trees1 = retrieveTrees(this.sentences1); + } + if (this.grammaticalStructuresF == null) { + this.grammaticalStructuresF = grammaticalStructureSetup(this.treesF); + } + if (this.grammaticalStructures1 == null) { + this.grammaticalStructures1 = grammaticalStructureSetup(this.trees1); + } + if (this.typedDependenciesF == null) { + this.typedDependenciesF = grammaticalStructureAllTypedDependencies(this.grammaticalStructuresF); + } + if (this.typedDependencies1 == null) { + this.typedDependencies1 = grammaticalStructureAllTypedDependencies(this.grammaticalStructures1); + } + if (this.rnnCoreAnnotationsPredictedF == null) { + this.rnnCoreAnnotationsPredictedF = sentimentRNNCoreAnnotations(this.sentencesSentimentF, + sentimentAnnotatedTreeClass); + } + if (this.rnnCoreAnnotationsPredicted1 == null) { + this.rnnCoreAnnotationsPredicted1 = sentimentRNNCoreAnnotations(this.sentencesSentiment1, + sentimentAnnotatedTreeClass); + } + if (this.simpleMatricesF == null) { + this.simpleMatricesF = sentimentRNNCorePredicted(this.sentencesSentimentF, sentimentAnnotatedTreeClass); + } + if (this.simpleMatrices1 == null) { + this.simpleMatrices1 = sentimentRNNCorePredicted(this.sentencesSentiment1, sentimentAnnotatedTreeClass); + } + if (this.simpleMatricesNodevectorsF == null) { + this.simpleMatricesNodevectorsF = sentimentRNNCoreNodevectors(this.sentencesSentimentF, sentimentAnnotatedTreeClass); + } + if (this.simpleMatricesNodevectors1 == null) { + this.simpleMatricesNodevectors1 = sentimentRNNCoreNodevectors(this.sentencesSentiment1, sentimentAnnotatedTreeClass); + } + if (this.listF == null) { + DocumentReaderAndWriter readerAndWriter = classifier.makePlainTextReaderAndWriter(); + this.listF = classifier.classifyRaw(str, readerAndWriter); + } + if (this.list1 == null) { + DocumentReaderAndWriter readerAndWriter = classifier.makePlainTextReaderAndWriter(); + this.list1 = classifier.classifyRaw(str1, readerAndWriter); + } + if (this.longestF == null) { + this.longestF = setupMainLongest(this.sentencesSentimentF); + } + if (this.longest1 == null) { + this.longest1 = setupMainLongest(this.sentencesSentiment1); + } + if (this.sentimentLongestF == null) { + this.sentimentLongestF = setupMainSentiment(this.sentencesSentimentF, sentimentAnnotatedTreeClass); + } + if (this.sentimentLongest1 == null) { + this.sentimentLongest1 = setupMainSentiment(this.sentencesSentiment1, sentimentAnnotatedTreeClass); + } + if (this.imwesF == null) { + this.imwesF = getIMWES(this.coreMaps1); + } + if (this.imwes1 == null) { + this.imwes1 = getIMWES(this.coreMaps2); + } + if (this.InflectedCounterNegativeF == null) { + this.InflectedCounterNegativeF = getInflictedCounterNegative(this.imwesF); + } + if (this.InflectedCounterNegative1 == null) { + this.InflectedCounterNegative1 = getInflictedCounterNegative(this.imwes1); + } + if (this.InflectedCounterPositiveF == null) { + this.InflectedCounterPositiveF = 
getInflictedCounterPositive(this.imwesF); + } + if (this.InflectedCounterPositive1 == null) { + this.InflectedCounterPositive1 = getInflictedCounterPositive(this.imwes1); + } + if (this.tokenEntryF == null) { + this.tokenEntryF = getTokenEntries(this.imwesF); + } + if (this.tokenEntry1 == null) { + this.tokenEntry1 = getTokenEntries(this.imwes1); + } + if (this.MarkedContinuousCounterF == null) { + this.MarkedContinuousCounterF = getMarkedCounter(this.imwesF); + } + if (this.MarkedContinuousCounter1 == null) { + this.MarkedContinuousCounter1 = getMarkedCounter(this.imwes1); + } + if (this.UnmarkedPatternCounterF == null) { + this.UnmarkedPatternCounterF = getUnmarkedPatterns(this.imwesF); + } + if (this.UnmarkedPatternCounter1 == null) { + this.UnmarkedPatternCounter1 = getUnmarkedPatterns(this.imwes1); + } + if (this.strTokensIpartFormF == null) { + this.strTokensIpartFormF = getstrTokensIpartForm(this.imwesF); + } + if (this.strTokensIpartForm1 == null) { + this.strTokensIpartForm1 = getstrTokensIpartForm(this.imwes1); + } + if (this.tokenFormsF == null) { + this.tokenFormsF = gettokenForms(this.imwesF); + } + if (this.tokenForms1 == null) { + this.tokenForms1 = gettokenForms(this.imwes1); + } + if (this.strTokenEntryGetPOSF == null) { + this.strTokenEntryGetPOSF = getStrtokenEntryPos(this.imwesF); + } + if (this.strTokenEntryGetPOS1 == null) { + this.strTokenEntryGetPOS1 = getStrtokenEntryPos(this.imwes1); + } + if (this.intTokenEntyCountsF == null) { + this.intTokenEntyCountsF = getintTokenEntyCounts(this.imwesF); + } + if (this.intTokenEntyCounts1 == null) { + this.intTokenEntyCounts1 = getintTokenEntyCounts(this.imwes1); + } + if (this.ITokenTagsF == null) { + this.ITokenTagsF = getITokenTags(this.imwesF); + } + if (this.ITokenTags1 == null) { + this.ITokenTags1 = getITokenTags(this.imwes1); + } + if (this.strTokenStemsF == null) { + this.strTokenStemsF = getstrTokenStems(this.imwesF); + } + if (this.strTokenStems1 == null) { + this.strTokenStems1 = getstrTokenStems(this.imwes1); + } + if (this.AnotatorcounterF == null) { + this.AnotatorcounterF = getAnotatorcounter(this.imwesF); + } + if (this.Anotatorcounter1 == null) { + this.Anotatorcounter1 = getAnotatorcounter(this.imwes1); + } + if (this.TokensCounterF == null) { + this.TokensCounterF = getAnotatorcounter(this.imwesF); + } + if (this.TokensCounter1 == null) { + this.TokensCounter1 = getAnotatorcounter(this.imwes1); + } + if (this.entityTokenTagsF == null) { + this.entityTokenTagsF = getentityTokenTags(this.pipelineCoreDcoument1); + } + if (this.entityTokenTags1 == null) { + this.entityTokenTags1 = getentityTokenTags(this.pipelineCoreDcoument2); + } + if (this.nerEntitiesF == null) { + this.nerEntitiesF = getnerEntities(this.pipelineCoreDcoument1); + } + if (this.nerEntities1 == null) { + this.nerEntities1 = getnerEntities(this.pipelineCoreDcoument2); + } + if (this.nerEntitiesTypeF == null) { + this.nerEntitiesTypeF = getnerEntitiesType(this.pipelineCoreDcoument1); + } + if (this.nerEntitiesType1 == null) { + this.nerEntitiesType1 = getnerEntitiesType(this.pipelineCoreDcoument2); + } + if (this.stopWordTokenF == null) { + this.stopWordTokenF = getstopWordToken(this.pipelineAnnotation1Sentiment); + } + if (this.stopWordToken1 == null) { + this.stopWordToken1 = getstopWordToken(this.pipelineAnnotation2Sentiment); + } + if (this.stopWordLemmaF == null) { + this.stopWordLemmaF = getstopWordLemma(this.pipelineAnnotation1Sentiment); + } + if (this.stopWordLemma1 == null) { + this.stopWordLemma1 = 
getstopWordLemma(this.pipelineAnnotation2Sentiment);
+        }
+        if (this.PairCounterF == null) {
+            this.PairCounterF = getPairCounter(this.pipelineAnnotation1Sentiment);
+        }
+        if (this.PairCounter1 == null) {
+            this.PairCounter1 = getPairCounter(this.pipelineAnnotation2Sentiment);
+        }
+    }
+
+
+    public SimilarityMatrix callSMX() {
         Double score = -100.0;
-        SentimentValueCache cacheSentimentLocal1 = null;
-        SentimentValueCache cacheSentimentLocal2 = null;
+        if (fh == null) { // attach the log FileHandler only once instead of on every call
+            try {
+                fh = new FileHandler("E:/stationær backup filer/Projects/ArtificialAutism/logs/autismlog.log");
+                logger.addHandler(fh);
+                SimpleFormatter formatter = new SimpleFormatter();
+                fh.setFormatter(formatter);
+            } catch (SecurityException | IOException e) {
+                e.printStackTrace();
+            }
+        }
+        //logger.info("Sent1: " + str);
+        //logger.info("Sent2: " + str1);
         int counter1;
         int counter2;
-        try {
-            if (cacheSentiment1 == null) {
-                cacheSentimentLocal1 = initializeCacheSetup(str, cacheSentimentLocal1);
-            }
-            if (cacheSentiment2 == null) {
-                cacheSentimentLocal2 = initializeCacheSetup(str1, cacheSentimentLocal2);
-            }
-        } catch (Exception ex) {
+        validateStringCaches();
+        counter1 = this.tokenizeCountingF;
+        counter2 = this.tokenizeCounting;
+        final int overValue = (counter1 >= counter2 ? counter1 - counter2 : counter2 - counter1) * 32;
+        score -= overValue;
+        //logger.info("score post overValue: " + score);
+        score = tgwListScoreIncrementer(score, this.retrieveTGWListF, this.retrieveTGWList1);
+        //logger.info("score post tgwListScoreIncrementer: " + score);
+        Class sentimentAnnotatedTreeClass =
+                SentimentCoreAnnotations.SentimentAnnotatedTree.class;
-        }
-        try {
-            counter1 = cacheSentiment1 == null ? cacheSentimentLocal1.getCounter() : cacheSentiment1.getCounter();
-            counter2 = cacheSentiment2 == null ? cacheSentimentLocal2.getCounter() : cacheSentiment2.getCounter();
+        score = iterateTrees(this.treesF, this.trees1, score);
+        //logger.info("\n\n \n \n \nscore post iterateTrees: " + score);
+        score = typeDependenciesGrammaticalRelation(this.typedDependenciesF, this.typedDependencies1, score, this.grammaticalStructuresF, this.grammaticalStructures1,
+                this.treesF, this.trees1);
+        //logger.info("score post typeDependenciesGrammaticalRelation: " + score);
-            final int overValue = (counter1 >= counter2 ? counter1 - counter2 : counter2 - counter1) * 32;
-            score -= overValue;
-        } catch (Exception ex) {
+        score = simpleRNNMatrixCalculations(score, this.simpleMatricesF, this.simpleMatrices1);
+        //logger.info("score post simpleRNNMatrixCalculations: " + score);
+        score = simpleRNNMaxtrixVectors(score, this.simpleMatricesNodevectorsF, this.simpleMatricesNodevectors1);
+        //logger.info("score post simpleRNNMaxtrixVectors: " + score);
+        Integer sentiment1 = this.rnnCoreAnnotationsPredictedF.size();
+        Integer sentiment2 = this.rnnCoreAnnotationsPredicted1.size();
+        score -= (sentiment1 > sentiment2 ? sentiment1 - sentiment2 : sentiment2 - sentiment1) * 500;
+        //logger.info("score post rnnCoreAnnotationsPredicted " + score);
+        score -= classifyRawEvaluation();
+        //logger.info("score post classifyRawEvaluation " + score);
+        score = sentimentMatrixVariances(score, this.longestF, this.longest1, this.sentimentLongestF, this.sentimentLongest1);
+        //logger.info("score post sentimentMatrixVariances " + score);
+        score = entryCountsRelation(score, this.intTokenEntyCountsF, this.intTokenEntyCounts1);
+        //logger.info("score post entryCountsRelation " + score);
+        score = entryCountsScoring(score, this.intTokenEntyCountsF, this.intTokenEntyCounts1);
+        //logger.info("score post entryCountsScoring " + score);
+        score = tokenEntryPosScoring(score, this.strTokenEntryGetPOSF, this.strTokenEntryGetPOS1);
+        //logger.info("score post tokenEntryPosScoring " + score);
+        score = unmarkedPatternCounterScoring(score, this.UnmarkedPatternCounterF,
+                this.UnmarkedPatternCounter1);
+        //logger.info("score post unmarkedPatternCounterScoring: " + score);
+        score = markedContiniousCounterScoring(score, this.MarkedContinuousCounterF,
+                this.MarkedContinuousCounter1);
+        //logger.info("score post markedContiniousCounterScoring " + score);
+        score = strTokensMapScoring(score, this.strTokensIpartFormF, this.strTokensIpartForm1);
+        //logger.info("score post strTokensMapScoring " + score);
+        score = strTokenEntryScoring(score, this.tokenEntryF, this.tokenEntry1);
+        //logger.info("score post strTokenEntryScoring " + score);
+        score = strTokenMapTagsScoring(score, this.ITokenTagsF, this.ITokenTags1);
+        //logger.info("score post strTokenMapTagsScoring " + score);
+        score = tokenformSizeScoring(score, this.tokenFormsF, this.tokenForms1);
+        //logger.info("score post tokenformSizeScoring " + score);
+        score = tokenStemmingMapScoring(score, this.strTokenStemsF, this.strTokenStems1);
+        //logger.info("score post tokenStemmingMapScoring " + score);
-        }
-        try {
-            if (cacheSentiment1 == null) {
-                ConcurrentMap retrieveTGWListIndex = retrieveTGWListIndex(cacheSentimentLocal1.getTaggedwordlist());
-                for (String str : retrieveTGWListIndex.values()) {
-                    cacheSentimentLocal1.addTgwlistIndex(str);
-                }
-            }
-            if (cacheSentiment2 == null) {
-                ConcurrentMap retrieveTGWListIndex = retrieveTGWListIndex(cacheSentimentLocal2.getTaggedwordlist());
-                for (String str : retrieveTGWListIndex.values()) {
-                    cacheSentimentLocal2.addTgwlistIndex(str);
-                }
-            }
-        } catch (Exception ex) {
-
-        }
-        try {
-            score = tgwListScoreIncrementer(score, cacheSentiment1 == null
-                    ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null ? cacheSentimentLocal2 : cacheSentiment2);
-            if (cacheSentiment1 == null) {
-                cacheSentimentLocal1 = GrammaticStructureSetup(cacheSentimentLocal1, pipelineAnnotation1);
-            }
-            if (cacheSentiment2 == null) {
-                cacheSentimentLocal2 = GrammaticStructureSetup(cacheSentimentLocal2, pipelineAnnotation2);
-            }
-        } catch (Exception ex) {
-
-        }
-        ConcurrentMap sentenceConstituencyParseList1 = null;
-        ConcurrentMap sentenceConstituencyParseList2 = null;
-        try {
-            sentenceConstituencyParseList2 = cacheSentiment2 == null
-                    ? cacheSentimentLocal2.getSentenceConstituencyParseList() : cacheSentiment2.getSentenceConstituencyParseList();
-            sentenceConstituencyParseList1 = cacheSentiment1 == null
-                    ?
cacheSentimentLocal1.getSentenceConstituencyParseList() : cacheSentiment1.getSentenceConstituencyParseList(); - score = iterateTrees(sentenceConstituencyParseList2, sentenceConstituencyParseList1, score); - } catch (Exception ex) { - - } - try { - Collection allTypedDependencies2 = cacheSentiment2 == null ? cacheSentimentLocal2.getAllTypedDependencies() - : cacheSentiment2.getAllTypedDependencies(); - Collection allTypedDependencies1 = cacheSentiment1 == null ? cacheSentimentLocal1.getAllTypedDependencies() - : cacheSentiment1.getAllTypedDependencies(); - - ConcurrentMap grammaticalMap1 = cacheSentiment1 == null ? cacheSentimentLocal1.getGs() : cacheSentiment1.getGs(); - ConcurrentMap grammaticalMap2 = cacheSentiment2 == null ? cacheSentimentLocal2.getGs() : cacheSentiment2.getGs(); - score = typeDependenciesGrammaticalRelation(allTypedDependencies1, allTypedDependencies2, score, grammaticalMap1, grammaticalMap2, - sentenceConstituencyParseList1, sentenceConstituencyParseList2); - } catch (Exception ex) { - - } - try { - if (cacheSentiment1 == null) { - cacheSentimentLocal1 = sentimentCoreAnnotationSetup(pipelineAnnotation1Sentiment, cacheSentimentLocal1); - } - if (cacheSentiment2 == null) { - cacheSentimentLocal2 = sentimentCoreAnnotationSetup(pipelineAnnotation2Sentiment, cacheSentimentLocal2); - } - } catch (Exception ex) { - - } - try { - final ConcurrentMap simpleSMXlist1 = cacheSentiment1 == null - ? cacheSentimentLocal1.getSimpleSMXlist() : cacheSentiment1.getSimpleSMXlist(); - final ConcurrentMap simpleSMXlist2 = cacheSentiment2 == null - ? cacheSentimentLocal2.getSimpleSMXlist() : cacheSentiment2.getSimpleSMXlist(); - final ConcurrentMap simpleSMXlistVector1 = cacheSentiment1 == null - ? cacheSentimentLocal1.getSimpleSMXlistVector() : cacheSentiment1.getSimpleSMXlistVector(); - final ConcurrentMap simpleSMXlistVector2 = cacheSentiment2 == null - ? cacheSentimentLocal2.getSimpleSMXlistVector() : cacheSentiment2.getSimpleSMXlistVector(); - score = simpleRNNMatrixCalculations(score, simpleSMXlist1, simpleSMXlist2); - score = simpleRNNMaxtrixVectors(score, simpleSMXlistVector1, simpleSMXlistVector2); - } catch (Exception ex) { - - } - try { - int sentiment1 = cacheSentiment1 == null ? cacheSentimentLocal1.getRnnPrediectClassMap().size() : cacheSentiment1.getRnnPrediectClassMap().size(); - int sentiment2 = cacheSentiment2 == null ? cacheSentimentLocal2.getRnnPrediectClassMap().size() : cacheSentiment2.getRnnPrediectClassMap().size(); - score -= (sentiment1 > sentiment2 ? sentiment1 - sentiment2 : sentiment2 - sentiment1) * 500; - Map.Entry> classifyRawEvaluationEntry = classifyRawEvaluation(score, cacheSentimentLocal1, - cacheSentimentLocal2); - score = classifyRawEvaluationEntry.getKey(); - if (cacheSentiment1 == null) { - cacheSentimentLocal1 = classifyRawEvaluationEntry.getValue().getKey(); - } - if (cacheSentiment2 == null) { - cacheSentimentLocal2 = classifyRawEvaluationEntry.getValue().getValue(); - } - } catch (Exception ex) { - - } - try { - if (cacheSentiment1 == null) { - cacheSentimentLocal1 = setupMainSentimentandLongestVal(pipelineAnnotation1Sentiment, cacheSentimentLocal1); - } - if (cacheSentiment2 == null) { - cacheSentimentLocal2 = setupMainSentimentandLongestVal(pipelineAnnotation2Sentiment, cacheSentimentLocal2); - } - score = sentimentMatrixVariances(score, cacheSentiment1 == null ? cacheSentimentLocal1.getLongest() : cacheSentiment1.getLongest(), - cacheSentiment2 == null ? 
cacheSentimentLocal2.getLongest() : cacheSentiment2.getLongest(), cacheSentiment1 == null - ? cacheSentimentLocal1.getMainSentiment() : cacheSentiment1.getMainSentiment(), cacheSentiment2 == null - ? cacheSentimentLocal2.getMainSentiment() : cacheSentiment2.getMainSentiment()); - } catch (Exception ex) { - - } - try { - if (cacheSentiment1 == null) { - cacheSentimentLocal1 = jmweAnnotationSetup(jmweStrAnnotation1, cacheSentimentLocal1); - } - if (cacheSentiment2 == null) { - cacheSentimentLocal2 = jmweAnnotationSetup(jmweStrAnnotation2, cacheSentimentLocal2); - } - } catch (Exception ex) { - - } - - SentimentValueCache scoringCache1 = cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1; - SentimentValueCache scoringCache2 = cacheSentiment2 == null ? cacheSentimentLocal2 : cacheSentiment2; - try { - score = entryCountsRelation(score, scoringCache1, scoringCache2); - } catch (Exception ex) { - - } - try { - score = entryCountsScoring(score, scoringCache1, scoringCache2); - } catch (Exception ex) { - - } - try { - score = tokenEntryPosScoring(score, scoringCache1, scoringCache2); - } catch (Exception ex) { - - } - try { - score = unmarkedPatternCounterScoring(score, scoringCache1, scoringCache2); - } catch (Exception ex) { - - } - try { - score = markedContiniousCounterScoring(score, scoringCache1, scoringCache2); - } catch (Exception ex) { - - } - try { - score = strTokensMapScoring(score, scoringCache1, scoringCache2); - } catch (Exception ex) { - - } - try { - score = strTokenEntryScoring(score, scoringCache1, scoringCache2); - } catch (Exception ex) { - - } - try { - score = strTokenMapTagsScoring(score, scoringCache1, scoringCache2); - } catch (Exception ex) { - - } - try { - score = tokenformSizeScoring(score, scoringCache1, scoringCache2); - } catch (Exception ex) { - - } - try { - score = tokenStemmingMapScoring(score, scoringCache1, scoringCache2); - } catch (Exception ex) { - - } - try { - score = inflectedCounterScoring(score, scoringCache1, scoringCache2); - } catch (Exception ex) { - - } - try { - score = annotatorCountScoring(score, scoringCache1, scoringCache2); - } catch (Exception ex) { - - } - try { - score = tokensCounterScoring(score, scoringCache1, scoringCache2); - } catch (Exception ex) { - - } - try { - LevenshteinDistance leven = new LevenshteinDistance(str, str1); - double SentenceScoreDiff = leven.computeLevenshteinDistance(); - SentenceScoreDiff *= 15; - score -= SentenceScoreDiff; - } catch (Exception ex) { - - } - try { - if (cacheSentiment1 == null) { - cacheSentimentLocal1 = setupNEREntitiesAndTokenTags(pipelineCoreDcoument1, cacheSentimentLocal1); - } - if (cacheSentiment2 == null) { - cacheSentimentLocal2 = setupNEREntitiesAndTokenTags(pipelineCoreDcoument2, cacheSentimentLocal2); - } - score = nerEntitiesAndTokenScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null - ? cacheSentimentLocal2 : cacheSentiment2); - } catch (Exception ex) { - - } - try { - if (cacheSentiment1 == null) { - cacheSentimentLocal1 = setupStoWordTokensLemma(pipelineAnnotation1Sentiment, cacheSentimentLocal1); - } - if (cacheSentiment2 == null) { - cacheSentimentLocal2 = setupStoWordTokensLemma(pipelineAnnotation2Sentiment, cacheSentimentLocal2); - } - score = stopWordTokenLemmaScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null - ? 
cacheSentimentLocal2 : cacheSentiment2); - } catch (Exception ex) { - - } - try { - score = stopwordTokenPairCounterScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null - ? cacheSentimentLocal2 : cacheSentiment2); - } catch (Exception ex) { - - } + score = inflectedCounterScoring(score, this.InflectedCounterPositiveF, this.InflectedCounterPositive1, + this.InflectedCounterNegativeF, this.InflectedCounterNegative1); + //logger.info("score post inflectedCounterScoring " + score); + score = annotatorCountScoring(score, this.AnotatorcounterF, this.Anotatorcounter1); + //logger.info("score post annotatorCountScoring " + score); + score = tokensCounterScoring(score, this.TokensCounterF, this.TokensCounter1); + //logger.info("score post tokensCounterScoring " + score); + LevenshteinDistance leven = new LevenshteinDistance(str, str1); + double SentenceScoreDiff = leven.computeLevenshteinDistance(); + SentenceScoreDiff *= 15; + score -= SentenceScoreDiff; + //logger.info("score post SentenceScoreDiff " + score); + score = nerEntitiesAndTokenScoring(score, this.entityTokenTagsF, this.entityTokenTags1, + this.nerEntitiesF, this.nerEntities1); + //logger.info("score post nerEntitiesAndTokenScoring " + score); + score = stopWordTokenLemmaScoring(score, this.stopWordTokenF, this.stopWordToken1, + this.stopWordLemmaF, this.stopWordLemma1); + //logger.info("score post stopWordTokenLemmaScoring " + score); + score = stopwordTokenPairCounterScoring(score, this.stopWordTokenF, this.stopWordToken1, + this.PairCounterF, this.PairCounter1); + //logger.info("score post stopwordTokenPairCounterScoring " + score); smxParam.setDistance(score); - try { - if (cacheSentiment1 == null) { - smxParam.setCacheValue1(cacheSentimentLocal1); - } - if (cacheSentiment2 == null) { - smxParam.setCacheValue2(cacheSentimentLocal2); - } - } catch (Exception ex) { - - } return smxParam; } } diff --git a/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentValueCache.java b/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentValueCache.java deleted file mode 100644 index 33455779..00000000 --- a/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentValueCache.java +++ /dev/null @@ -1,334 +0,0 @@ -/* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. 
- */ -package FunctionLayer.StanfordParser; - -import com.google.common.collect.MapMaker; -import edu.stanford.nlp.ling.TaggedWord; -import edu.stanford.nlp.trees.GrammaticalStructure; -import edu.stanford.nlp.trees.Tree; -import edu.stanford.nlp.trees.TypedDependency; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ConcurrentMap; -import org.ejml.simple.SimpleMatrix; - -/** - * - * @author install1 - */ -public class SentimentValueCache { - - private String sentence; - private int counter; - private List> taggedwordlist = new ArrayList(); - private final ConcurrentMap tgwlistIndex = new MapMaker().concurrencyLevel(2).makeMap(); - private final ConcurrentMap sentenceConstituencyParseList = new MapMaker().concurrencyLevel(2).makeMap(); - private final Collection allTypedDependencies = new ArrayList(); - private final ConcurrentMap gsMap = new MapMaker().concurrencyLevel(2).makeMap(); - private final ConcurrentMap simpleSMXlist = new MapMaker().concurrencyLevel(3).makeMap(); - private final ConcurrentMap simpleSMXlistVector = new MapMaker().concurrencyLevel(3).makeMap(); - private final ConcurrentMap rnnPredictClassMap = new MapMaker().concurrencyLevel(3).makeMap(); - private List classifyRaw; - private int mainSentiment = 0; - private int longest = 0; - private int tokensCounter = 0; - private int anotatorcounter = 0; - private int inflectedCounterPositive = 0; - private int inflectedCounterNegative = 0; - private int MarkedContinuousCounter = 0; - private int MarkedContiniousCounterEntries = 0; - private int UnmarkedPatternCounter = 0; - private int pairCounter = 0; - private final ConcurrentMap ITokenMapTag = new MapMaker().concurrencyLevel(2).makeMap(); - private final ConcurrentMap strTokenStems = new MapMaker().concurrencyLevel(2).makeMap(); - private final ConcurrentMap strTokenForm = new MapMaker().concurrencyLevel(2).makeMap(); - private final ConcurrentMap strTokenGetEntry = new MapMaker().concurrencyLevel(2).makeMap(); - private final ConcurrentMap strTokenGetiPart = new MapMaker().concurrencyLevel(2).makeMap(); - private final ConcurrentMap strTokenEntryPOS = new MapMaker().concurrencyLevel(2).makeMap(); - private final ConcurrentMap entryCounts = new MapMaker().concurrencyLevel(2).makeMap(); - private final ConcurrentMap nerEntities1 = new MapMaker().concurrencyLevel(2).makeMap(); - private final ConcurrentMap nerEntities2 = new MapMaker().concurrencyLevel(2).makeMap(); - private final ConcurrentMap nerEntityTokenTags = new MapMaker().concurrencyLevel(3).makeMap(); - private final ConcurrentMap stopwordTokens = new MapMaker().concurrencyLevel(2).makeMap(); - private final ConcurrentMap stopWordLemma = new MapMaker().concurrencyLevel(2).makeMap(); - - public int getPairCounter() { - return pairCounter; - } - - public void setPairCounter(int pairCounter) { - this.pairCounter = pairCounter; - } - - public void addStopWordLemma(String str) { - stopWordLemma.put(stopWordLemma.size(), str); - } - - public void addstopwordTokens(String str) { - stopwordTokens.put(stopwordTokens.size(), str); - } - - public ConcurrentMap getStopwordTokens() { - return stopwordTokens; - } - - public ConcurrentMap getStopWordLemma() { - return stopWordLemma; - } - - public void addnerEntityTokenTags(String str) { - nerEntityTokenTags.put(nerEntityTokenTags.size(), str); - } - - public ConcurrentMap getnerEntityTokenTags() { - return nerEntityTokenTags; - } - - public ConcurrentMap getnerEntities1() { - return 
nerEntities1; - } - - public ConcurrentMap getnerEntities2() { - return nerEntities2; - } - - public void addNEREntities1(String str) { - nerEntities1.put(nerEntities1.size(), str); - } - - public void addNEREntities2(String str) { - nerEntities2.put(nerEntities2.size(), str); - } - - public void setTaggedwords(List> twlist) { - taggedwordlist = twlist; - } - - public List> getTaggedwordlist() { - return taggedwordlist; - } - - public void addEntryCounts(int counts) { - entryCounts.put(entryCounts.size(), counts); - } - - public ConcurrentMap getEntryCounts() { - return entryCounts; - } - - public void addstrTokenEntryPOS(String str) { - strTokenEntryPOS.put(strTokenEntryPOS.size(), str); - } - - public ConcurrentMap getstrTokenEntryPOS() { - return strTokenEntryPOS; - } - - public void addstrTokenGetiPart(String str) { - strTokenGetiPart.put(strTokenGetiPart.size(), str); - } - - public ConcurrentMap getstrTokenGetiPart() { - return strTokenGetiPart; - } - - public ConcurrentMap getstrTokenGetEntry() { - return strTokenGetEntry; - } - - public void addstrTokenGetEntry(String str) { - strTokenGetEntry.put(strTokenGetEntry.size(), str); - } - - public ConcurrentMap getstrTokenForm() { - return strTokenForm; - } - - public void addstrTokenForm(String str) { - strTokenForm.put(strTokenForm.size(), str); - } - - public ConcurrentMap getstrTokenStems() { - return strTokenStems; - } - - public void addstrTokenStems(String str) { - strTokenStems.put(strTokenStems.size(), str); - } - - public ConcurrentMap getITokenMapTag() { - return ITokenMapTag; - } - - public void addITokenMapTag(String str) { - ITokenMapTag.put(ITokenMapTag.size(), str); - } - - public int getUnmarkedPatternCounter() { - return UnmarkedPatternCounter; - } - - public void setUnmarkedPatternCounter(int UnmarkedPatternCounter) { - this.UnmarkedPatternCounter = UnmarkedPatternCounter; - } - - public int getMarkedContiniousCounterEntries() { - return MarkedContiniousCounterEntries; - } - - public void setMarkedContiniousCounterEntries(int MarkedContiniousCounterEntries) { - this.MarkedContiniousCounterEntries = MarkedContiniousCounterEntries; - } - - public int getMarkedContinuousCounter() { - return MarkedContinuousCounter; - } - - public void setMarkedContinuousCounter(int MarkedContinuousCounter) { - this.MarkedContinuousCounter = MarkedContinuousCounter; - } - - public int getInflectedCounterNegative() { - return inflectedCounterNegative; - } - - public void setInflectedCounterNegative(int inflectedCounterNegative) { - this.inflectedCounterNegative = inflectedCounterNegative; - } - - public int getInflectedCounterPositive() { - return inflectedCounterPositive; - } - - public void setInflectedCounterPositive(int inflectedCounterPositive) { - this.inflectedCounterPositive = inflectedCounterPositive; - } - - public int getAnotatorcounter() { - return anotatorcounter; - } - - public void setAnotatorcounter(int anotatorcounter) { - this.anotatorcounter = anotatorcounter; - } - - public int getTokensCounter() { - return tokensCounter; - } - - public void setTokensCounter(int tokensCounter) { - this.tokensCounter = tokensCounter; - } - - public int getMainSentiment() { - return mainSentiment; - } - - public void setMainSentiment(int mainSentiment) { - this.mainSentiment = mainSentiment; - } - - public int getLongest() { - return longest; - } - - public void setLongest(int longest) { - this.longest = longest; - } - - public List getClassifyRaw() { - return classifyRaw; - } - - public void setClassifyRaw(List classifyRaw) { - 
this.classifyRaw = classifyRaw; - } - - public ConcurrentMap getRnnPrediectClassMap() { - return rnnPredictClassMap; - } - - public void addRNNPredictClass(int rnnPrediction) { - rnnPredictClassMap.put(rnnPredictClassMap.size(), rnnPrediction); - } - - public void addSimpleMatrix(SimpleMatrix SMX) { - simpleSMXlist.put(simpleSMXlist.size(), SMX); - } - - public void addSimpleMatrixVector(SimpleMatrix SMX) { - simpleSMXlistVector.put(simpleSMXlistVector.size(), SMX); - } - - public ConcurrentMap getGsMap() { - return gsMap; - } - - public ConcurrentMap getSimpleSMXlist() { - return simpleSMXlist; - } - - public ConcurrentMap getSimpleSMXlistVector() { - return simpleSMXlistVector; - } - - public ConcurrentMap getGs() { - return gsMap; - } - - public int getCounter() { - return counter; - } - - public void addGS(GrammaticalStructure gs) { - gsMap.put(gsMap.size(), gs); - } - - public Collection getAllTypedDependencies() { - return allTypedDependencies; - } - - public void addTypedDependencies(Collection TDPlist) { - for (TypedDependency TDP : TDPlist) { - allTypedDependencies.add(TDP); - } - } - - public ConcurrentMap getSentenceConstituencyParseList() { - return sentenceConstituencyParseList; - } - - public void addSentenceConstituencyParse(Tree tree) { - sentenceConstituencyParseList.put(sentenceConstituencyParseList.size(), tree); - } - - public void setCounter(int counter) { - counter = counter; - } - - public String getSentence() { - return sentence; - } - - public SentimentValueCache(String str, int counter) { - this.sentence = str; - this.counter = counter; - } - - public ConcurrentMap getTgwlistIndex() { - return tgwlistIndex; - } - - public void addTgwlistIndex(String str) { - tgwlistIndex.put(tgwlistIndex.size(), str); - } - - public SentimentValueCache(String str) { - this.sentence = str; - } -} diff --git a/ArtificialAutism/src/main/java/META-INF/MANIFEST.MF b/ArtificialAutism/src/main/java/META-INF/MANIFEST.MF new file mode 100644 index 00000000..3de0fd0a --- /dev/null +++ b/ArtificialAutism/src/main/java/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: PresentationLayer.DiscordHandler + diff --git a/ArtificialAutism/src/main/java/PresentationLayer/DiscordHandler.java b/ArtificialAutism/src/main/java/PresentationLayer/DiscordHandler.java index 23d87a64..26179d8d 100644 --- a/ArtificialAutism/src/main/java/PresentationLayer/DiscordHandler.java +++ b/ArtificialAutism/src/main/java/PresentationLayer/DiscordHandler.java @@ -1,71 +1,111 @@ -/* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. 
-
- ps ax | grep EventNotfierDiscordBot-1.0
- kill $pid (number)
-
-nohup screen -d -m -S nonroot java -Xmx6048M -jar /home/javatests/ArtificialAutism-1.0.jar
-nohup screen -d -m -S nonroot java -Xmx6800M -jar /home/javatests/ArtificialAutism-1.0.jar
-
-screen -ls (number1)
-screen -X -S (number1) quit
- */
 package PresentationLayer;

+import DataLayer.settings;
 import FunctionLayer.Datahandler;
-import FunctionLayer.DoStuff;
 import FunctionLayer.PipelineJMWESingleton;
 import discord4j.core.DiscordClient;
 import discord4j.core.GatewayDiscordClient;
-import java.io.IOException;
-import java.sql.SQLException;
-import java.util.Timer;
-import java.util.TimerTask;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-import DataLayer.settings;
-import discord4j.common.util.Snowflake;
 import discord4j.core.event.domain.message.MessageCreateEvent;
-import java.math.BigInteger;
+import edu.stanford.nlp.pipeline.StanfordCoreNLP;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.net.*;
+import java.sql.SQLException;
+import java.util.ArrayList;

 /**
- *
  * @author install1
  */
 public class DiscordHandler {
-    public static void main(String[] args) {
-        System.setProperty("java.util.concurrent.ForkJoinPool.common.parallelism", "15");
+
+    private static void receiveAndSendPacket(DatagramSocket serverSocket, InetAddress ipAddress, int port,
+                                             Datahandler datahandler, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) throws IOException {
+        byte[] receiveData = new byte[4096];
+        DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
         try {
-            Datahandler.instance.initiateMYSQL();
-            //nohup screen -d -m -S nonroot java -Xmx6900M -jar /home/javatests/ArtificialAutism-1.0.jar
-            //uncomment db fetch when ready, just keep the comment for future reference
-            System.out.println("finished initiating MYSQL");
-        } catch (SQLException | IOException ex) {
-            Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);
+            serverSocket.receive(receivePacket);
+        } catch (IOException e) {
+            e.printStackTrace();
+            return; // nothing was received, so there is nothing to reply to
         }
+        String sentence = new String(receivePacket.getData(), 0,
+                receivePacket.getLength());
+        sentence = sentence.replace("clientmessage:", "");
+        String ResponseMsg = datahandler.getResponseMsg(sentence, "", stanfordCoreNLP, stanfordCoreNLPSentiment,
+                true);
+        byte[] sendData = ResponseMsg.getBytes("UTF-8");
+        int deliver_port = 0;
+        switch (port) {
+            case 48475:
+                deliver_port = 48470;
+                break;
+            case 48476:
+                deliver_port = 48471;
+                break;
+            case 48477:
+                deliver_port = 48472;
+                break;
+            case 48478:
+                deliver_port = 48473;
+                break;
+        }
+        DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, ipAddress, deliver_port);
+        serverSocket.send(sendPacket);
+    }
+
+    public static void handleUDPTraffic(int port, Datahandler datahandler,
+                                        StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) {
+
+        try (DatagramSocket serverSocket = new DatagramSocket(port)) {
+
+            String hostIP = "195.154.53.196";
+            if (port == 48477 || port == 48478) {
+                hostIP = "51.158.20.245";
+            }
+            InetAddress ipAddress = InetAddress.getByName(hostIP); // fixed address the replies are sent to
+            while (true) {
+                receiveAndSendPacket(serverSocket, ipAddress, port, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
+            }
+        } catch (IOException e) {
+            // SocketException, UnknownHostException and UnsupportedEncodingException are all IOExceptions
+            e.printStackTrace();
+        }
+    }
+
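+    /*
+     * Usage sketch for handleUDPTraffic (illustrative only, not part of the bot).
+     * The "clientmessage:" prefix, the 4096-byte buffer and the 48475 -> 48470
+     * port pair come from receiveAndSendPacket above. The loopback address is an
+     * assumption for local testing; note that replies go to the fixed hostIP
+     * above rather than back to the packet's sender, so the client must run on
+     * that host to see the answer.
+     *
+     *   DatagramSocket client = new DatagramSocket(48470);
+     *   byte[] out = "clientmessage:hello bot".getBytes(java.nio.charset.StandardCharsets.UTF_8);
+     *   client.send(new DatagramPacket(out, out.length, InetAddress.getByName("127.0.0.1"), 48475));
+     *   byte[] in = new byte[4096];
+     *   DatagramPacket reply = new DatagramPacket(in, in.length);
+     *   client.receive(reply);
+     *   System.out.println(new String(reply.getData(), 0, reply.getLength(), java.nio.charset.StandardCharsets.UTF_8));
+     */
+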
+    public static void main(String[] args) throws IOException, SQLException {
+        Datahandler datahandler = new Datahandler();
+        datahandler.initiateMYSQL();
+
         PipelineJMWESingleton.getINSTANCE();
-        Datahandler.instance.instantiateAnnotationMapJMWE();
-        Datahandler.instance.shiftReduceParserInitiate();
-        Datahandler.instance.instantiateAnnotationMap();
+        StanfordCoreNLP stanfordCoreNLP = datahandler.pipeLineSetUp();
+        StanfordCoreNLP stanfordCoreNLPSentiment = datahandler.shiftReduceParserInitiate();
         System.out.println("FINISHED ALL ANNOTATIONS");
-        Datahandler.instance.addHLstatsMessages();
-        Datahandler.instance.updateStringCache();
-        //String token = "NTI5NzAxNTk5NjAyMjc4NDAx.Dw0vDg.7-aMjVWdQMYPl8qVNyvTCPS5F_A";
+        datahandler.updateStringCache();
+        System.out.println("updated string cache");
         String token = new settings().getDiscordToken();
         final DiscordClient client = DiscordClient.create(token);
         final GatewayDiscordClient gateway = client.login().block();
         String usernameBot = gateway.getSelf().block().getUsername();
-        new Thread(() -> {
-            Datahandler.instance.update_autismo_socket_msg();
-        }).start();
+        int autismbotCount = 4;
+        //make sure not to use ports that are already occupied.
+        ArrayList ports = new ArrayList();
+        ports.add(48475);
+        ports.add(48476);
+        ports.add(48477);
+        ports.add(48478);
+        for (int i = 0; i < autismbotCount; i++) {
+            final int j = i;
+            new Thread(() -> {
+                handleUDPTraffic(ports.get(j), datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
+            }).start();
+        }
         gateway.on(MessageCreateEvent.class).subscribe(event -> {
-            if (!FunctionLayer.DoStuff.isOccupied()) {
-                FunctionLayer.DoStuff.doStuff(event, usernameBot);
-            }
+            FunctionLayer.DoStuff.doStuff(event, usernameBot, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
         });
         gateway.onDisconnect().block();
-    }
+    } //3.1.1 discord4j version
 }
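
A possible follow-up (sketch, not part of this patch): receiveAndSendPacket falls back to
deliver_port 0 when the listen port matches no case in its switch, which would aim the reply
at an invalid port. A lookup table built from the same pairs makes the mapping explicit and
fails fast on an unmapped port; the names below are hypothetical.

    // Hypothetical replacement for the switch in receiveAndSendPacket;
    // listen/reply pairs copied verbatim from the patch above.
    private static final java.util.Map<Integer, Integer> REPLY_PORTS = new java.util.HashMap<>();

    static {
        REPLY_PORTS.put(48475, 48470);
        REPLY_PORTS.put(48476, 48471);
        REPLY_PORTS.put(48477, 48472);
        REPLY_PORTS.put(48478, 48473);
    }

    private static int replyPortFor(int listenPort) {
        Integer replyPort = REPLY_PORTS.get(listenPort);
        if (replyPort == null) {
            throw new IllegalArgumentException("no reply port mapped for " + listenPort);
        }
        return replyPort;
    }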