had accidentally uploaded the wrong version before
This commit is contained in:
parent 4c205f49d5
commit e9367f62fa

DataLayer/DataMapper.java
@@ -5,69 +5,41 @@
 */
package DataLayer;

import FunctionLayer.SimilarityMatrix;
import FunctionLayer.CustomError;
import com.google.common.collect.MapMaker;
import org.jetbrains.annotations.NotNull;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 *
 * @author install1
 */
public class DataMapper {

    public static void createTables() throws CustomError {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        try {
            l_cCon = DBCPDataSource.getConnection();
            String l_sSQL = "CREATE TABLE IF NOT EXISTS `ArtificialAutism`.`Sentences` (`Strings` text NOT NULL)";
            l_pStatement = l_cCon.prepareStatement(l_sSQL);
            l_pStatement.execute();
        } catch (SQLException ex) {
            throw new CustomError("failed in DataMapper  " + ex.getMessage());
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
    }
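
Note: createTables drives a single DDL statement through plain JDBC and releases its resources by hand via CloseConnections. A minimal try-with-resources sketch of the same pattern, assuming only a generic javax.sql.DataSource in place of the project's DBCPDataSource:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import javax.sql.DataSource;

public class CreateTableSketch {
    // Hypothetical DataSource; stands in for the project's DBCPDataSource.
    public static void createSentencesTable(DataSource ds) throws SQLException {
        String sql = "CREATE TABLE IF NOT EXISTS `Sentences` (`Strings` text NOT NULL)";
        // try-with-resources closes the statement and connection even on failure,
        // replacing the explicit finally/CloseConnections bookkeeping above.
        try (Connection con = ds.getConnection();
             PreparedStatement ps = con.prepareStatement(sql)) {
            ps.execute();
        }
    }
}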

    public static ConcurrentMap<Integer, String> getAllStrings() throws CustomError {
        ConcurrentMap<Integer, String> allStrings = new MapMaker().concurrencyLevel(2).makeMap();
    public static ArrayList<String> getAllStrings() throws SQLException {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        ArrayList<String> arrayListStr = new ArrayList();
        try {
            l_cCon = DBCPDataSource.getConnection();
            String l_sSQL = "SELECT * FROM `Sentences`";
            l_pStatement = l_cCon.prepareStatement(l_sSQL);
            l_rsSearch = l_pStatement.executeQuery();
            int ij = 0;
            while (l_rsSearch.next()) {
                allStrings.put(ij, l_rsSearch.getString(1));
                ij++;
                arrayListStr.add(l_rsSearch.getString(1));
            }
        } catch (SQLException ex) {
            throw new CustomError("failed in DataMapper  " + ex.getMessage());
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
        return allStrings;
        return arrayListStr;
    }

    public static void InsertMYSQLStrings(ConcurrentMap<Integer, String> str) throws CustomError {
    public static void InsertMYSQLStrings(ArrayList<String> str) throws SQLException {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
@@ -75,35 +47,15 @@ public class DataMapper {
        try {
            l_cCon = DBCPDataSource.getConnection();
            l_pStatement = l_cCon.prepareStatement(l_sSQL);
            for (String str1 : str.values()) {
                //System.out.println("adding str1: " + str1 + "\n");
            for (String str1 : str) {
                l_pStatement.setString(1, str1);
                l_pStatement.addBatch();
                l_pStatement.execute();
            }
            l_pStatement.executeBatch();
        } catch (SQLException ex) {
            throw new CustomError("failed in DataMapper  " + ex.getMessage());
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
    }
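
Note: the hunk above shows the insert loop in both its batched form (addBatch/executeBatch) and its per-row execute() form; the actual INSERT text sits in the collapsed part of the hunk. A standalone sketch of the batched variant, with an assumed equivalent SQL string and a generic DataSource:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;
import javax.sql.DataSource;

public class BatchInsertSketch {
    // Inserts every string in one round of batches instead of one execute() per row.
    public static void insertSentences(DataSource ds, List<String> rows) throws SQLException {
        // Assumed equivalent of the collapsed l_sSQL declaration above.
        String sql = "INSERT INTO `Sentences` (`Strings`) VALUES (?)";
        try (Connection con = ds.getConnection();
             PreparedStatement ps = con.prepareStatement(sql)) {
            for (String row : rows) {
                ps.setString(1, row);
                ps.addBatch();          // queue the row locally
            }
            ps.executeBatch();          // single round trip for the whole batch
        }
    }
}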

    public static ConcurrentMap<Integer, String> getHLstatsMessages() {
        ConcurrentMap<Integer, String> hlStatsMessages = new MapMaker().concurrencyLevel(2).makeMap();
        try (Connection l_cCon = DBCPDataSourceHLstats.getConnection()) {
            String l_sSQL = "SELECT message FROM `hlstats_Events_Chat`";
            try (PreparedStatement l_pStatement = l_cCon.prepareStatement(l_sSQL)) {
                try (ResultSet l_rsSearch = l_pStatement.executeQuery()) {
                    while (l_rsSearch.next()) {
                        hlStatsMessages.put(hlStatsMessages.size() + 1, l_rsSearch.getString(1));
                    }
                }
            }
        } catch (SQLException ex) {
            Logger.getLogger(DataMapper.class.getName()).log(Level.SEVERE, null, ex);
        }
        return hlStatsMessages;
    }

    public static void CloseConnections(PreparedStatement ps, ResultSet rs, Connection con) {
        if (rs != null) {
@@ -128,4 +80,41 @@ public class DataMapper {
            }
        }
    }

    public static void checkStringsToDelete() {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        String l_sSQL = "delete from Sentences\n" +
                " where DATE(last_used) < DATE_SUB(CURDATE(), INTERVAL 32 DAY)\n" +
                "  order by last_used asc limit 3";
        try {
            l_cCon = DBCPDataSource.getConnection();
            l_pStatement = l_cCon.prepareStatement(l_sSQL);
            l_pStatement.execute();
        } catch (SQLException throwables) {
            throwables.printStackTrace();
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
    }

    public static void updateLastUsed(@NotNull ArrayList<String> mysqlUpdateLastUsed) {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        String l_sSQL = "update Sentences Set last_used = now() where Strings = (?)";
        try {
            l_cCon = DBCPDataSource.getConnection();
            l_pStatement = l_cCon.prepareStatement(l_sSQL);
            for (String str1 : mysqlUpdateLastUsed) {
                l_pStatement.setString(1, str1);
                l_pStatement.execute();
            }
        } catch (SQLException throwables) {
            throwables.printStackTrace();
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
    }
}

FunctionLayer/CustomError.java
@@ -1,17 +0,0 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package FunctionLayer;

/**
 *
 * @author install1
 */
public class CustomError extends Exception {

    public CustomError(String msg) {
        super(msg);
    }
}
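
Note: CustomError is a plain checked wrapper that DataMapper uses to surface SQL failures to callers. A minimal usage sketch (the nested class is a local copy so the snippet compiles on its own):

public class CustomErrorDemo {
    public static void main(String[] args) {
        try {
            throw new CustomError("failed in DataMapper  example");
        } catch (CustomError ex) {
            // The wrapped message carries the original SQLException text.
            System.out.println(ex.getMessage());
        }
    }

    // Local copy of the class above so the sketch is self-contained.
    static class CustomError extends Exception {
        CustomError(String msg) {
            super(msg);
        }
    }
}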

FunctionLayer/Datahandler.java
@@ -1,825 +0,0 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package FunctionLayer;

import DataLayer.DataMapper;
import FunctionLayer.StanfordParser.SentimentAnalyzerTest;
import FunctionLayer.StanfordParser.SentimentValueCache;
import com.google.common.base.Stopwatch;
import com.google.common.collect.MapMaker;
import edu.stanford.nlp.ie.AbstractSequenceClassifier;
import edu.stanford.nlp.ie.crf.CRFClassifier;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.parser.lexparser.LexicalizedParser;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.CoreDocument;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.tagger.maxent.MaxentTagger;
import edu.stanford.nlp.trees.GrammaticalStructureFactory;
import edu.stanford.nlp.trees.TreebankLanguagePack;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import static java.lang.Math.random;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.sql.SQLException;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ForkJoinTask;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 *
 * @author install1
 */
public class Datahandler {

    public static final long EXPIRE_TIME_IN_SECONDS = TimeUnit.SECONDS.convert(10, TimeUnit.MINUTES);
    public static final long EXPIRE_TIME_IN_SECONDS1 = TimeUnit.SECONDS.convert(10, TimeUnit.HOURS);
    public static Datahandler instance = new Datahandler();
    private static Annotation strAnno;
    private static Annotation strAnnoSentiment;
    private static Annotation strAnnoJMWE;
    private static CoreDocument coreDoc;
    private static final ConcurrentMap<Integer, String> stringCache = new MapMaker().concurrencyLevel(6).makeMap();
    private static ConcurrentMap<String, Annotation> pipelineAnnotationCache;
    private static ConcurrentMap<String, Annotation> pipelineSentimentAnnotationCache;
    private static ConcurrentMap<String, Annotation> jmweAnnotationCache;
    private static ConcurrentMap<String, CoreDocument> coreDocumentAnnotationCache;
    private static ConcurrentMap<String, SentimentValueCache> sentimentCachingMap = new MapMaker().concurrencyLevel(6).makeMap();
    private LinkedHashMap<String, LinkedHashMap<String, Double>> lHMSMX = new LinkedHashMap();
    private final Stopwatch stopwatch;
    private static String similar = "";
    private static String shiftReduceParserPath = "edu/stanford/nlp/models/srparser/englishSR.ser.gz";
    private static String sentimentModel = "edu/stanford/nlp/models/sentiment/sentiment.ser.gz";
    private static String lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz";
    private static String taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger";
    private static String nerModel = "edu/stanford/nlp/models/ner/english.all.3class.caseless.distsim.crf.ser.gz";
    private static String nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.caseless.distsim.crf.ser.gz";
    private static String nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.caseless.distsim.crf.ser.gz";
    private static final String customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for,if,in,into,is,it,no,not,of,on,or,such,that,the,their,then,there,these,they,this,to,was,will,with";
    private static MaxentTagger tagger;
    private static String[] options = {"-maxLength", "100"};
    private static Properties props = new Properties();
    private static Properties propsSentiment = new Properties();
    private static GrammaticalStructureFactory gsf;
    private static LexicalizedParser lp;
    private static TreebankLanguagePack tlp;
    private static AbstractSequenceClassifier<CoreLabel> classifier;
    // set up Stanford CoreNLP pipeline
    private static final StanfordCoreNLP pipeline = getPipeLineSetUp();
    private static StanfordCoreNLP pipelineSentiment;

    public Datahandler() {
        this.stopwatch = Stopwatch.createUnstarted();
        this.jmweAnnotationCache = new MapMaker().concurrencyLevel(3).makeMap();
        this.pipelineAnnotationCache = new MapMaker().concurrencyLevel(4).makeMap();
        this.pipelineSentimentAnnotationCache = new MapMaker().concurrencyLevel(4).makeMap();
        this.coreDocumentAnnotationCache = new MapMaker().concurrencyLevel(5).makeMap();
    }
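
Note: every cache in this class is built with Guava's MapMaker, where concurrencyLevel is only a sizing hint for internal segmentation and makeMap() returns an ordinary ConcurrentMap. A minimal sketch:

import com.google.common.collect.MapMaker;
import java.util.concurrent.ConcurrentMap;

public class CacheSketch {
    public static void main(String[] args) {
        // concurrencyLevel estimates the number of concurrent writers; it is not a hard limit.
        ConcurrentMap<String, String> cache = new MapMaker().concurrencyLevel(4).makeMap();
        cache.put("key", "value");
        System.out.println(cache.get("key"));
    }
}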

    public static StanfordCoreNLP getPipeline() {
        return pipeline;
    }

    private static StanfordCoreNLP getPipeLineSetUp() {
        props.setProperty("annotators", "tokenize,ssplit,pos,lemma,ner,parse");
        props.setProperty("parse.model", shiftReduceParserPath);
        props.setProperty("parse.maxlen", "90");
        props.setProperty("parse.binaryTrees", "true");
        props.setProperty("threads", "8");
        props.setProperty("pos.maxlen", "90");
        props.setProperty("tokenize.maxlen", "90");
        props.setProperty("ssplit.maxlen", "90");
        props.setProperty("lemma.maxlen", "90");
        props.setProperty("ner.model", nerModel + "," + nerModel2 + "," + nerModel3);
        props.setProperty("ner.combinationMode", "HIGH_RECALL");
        props.setProperty("regexner.ignorecase", "true");
        props.setProperty("ner.fine.regexner.ignorecase", "true");
        props.setProperty("tokenize.options", "untokenizable=firstDelete");
        return new StanfordCoreNLP(props);
    }
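
Note: getPipeLineSetUp configures one shared StanfordCoreNLP pipeline from a Properties object. A minimal sketch of driving such a pipeline, assuming a stock CoreNLP distribution on the classpath and a deliberately smaller annotator list than the one above:

import edu.stanford.nlp.pipeline.CoreDocument;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import java.util.Properties;

public class PipelineSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("annotators", "tokenize,ssplit,pos");
        // Constructing the pipeline loads the models once; it is then reused for every document.
        StanfordCoreNLP pipeline = new StanfordCoreNLP(props);
        // CoreDocument is the same document-level wrapper this class caches.
        CoreDocument doc = new CoreDocument("The parser loads once and is reused.");
        pipeline.annotate(doc);
        doc.sentences().forEach(s -> System.out.println(s.text()));
    }
}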

    public void shiftReduceParserInitiate() {
        //got 8 cores
        CountDownLatch cdl = new CountDownLatch(2);
        new Thread(() -> {
            try {
                classifier = CRFClassifier.getClassifierNoExceptions(nerModel);
            } catch (ClassCastException ex) {
                Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
            }
            cdl.countDown();
        }).start();
        new Thread(() -> {
            propsSentiment.setProperty("parse.model", lexParserEnglishRNN);
            propsSentiment.setProperty("sentiment.model", sentimentModel);
            propsSentiment.setProperty("parse.maxlen", "90");
            propsSentiment.setProperty("threads", "8");
            propsSentiment.setProperty("pos.maxlen", "90");
            propsSentiment.setProperty("tokenize.maxlen", "90");
            propsSentiment.setProperty("ssplit.maxlen", "90");
            propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,sentiment,lemma,stopword"); //coref too expensive memorywise
            propsSentiment.setProperty("customAnnotatorClass.stopword", "FunctionLayer.StopwordAnnotator");
            propsSentiment.setProperty(StopwordAnnotator.STOPWORDS_LIST, customStopWordList);
            propsSentiment.setProperty("tokenize.options", "untokenizable=firstDelete");
            pipelineSentiment = new StanfordCoreNLP(propsSentiment);
            tagger = new MaxentTagger(taggerPath);
            cdl.countDown();
        }).start();
        lp = LexicalizedParser.loadModel(lexParserEnglishRNN, options);
        tlp = lp.getOp().langpack();
        gsf = tlp.grammaticalStructureFactory();
        try {
            cdl.await();
        } catch (InterruptedException ex) {
            //System.out.println("cdl await interrupted: " + ex.getLocalizedMessage() + "\n");
        }
        System.out.println("finished shiftReduceParserInitiate\n");
    }
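
Note: shiftReduceParserInitiate loads the NER classifier and the sentiment pipeline on two worker threads and blocks on a CountDownLatch until both are ready. The coordination pattern in isolation:

import java.util.concurrent.CountDownLatch;

public class InitLatchSketch {
    public static void main(String[] args) throws InterruptedException {
        CountDownLatch cdl = new CountDownLatch(2);
        Runnable load = () -> {
            // ... load a model here ...
            cdl.countDown();   // signal that this loader is done
        };
        new Thread(load).start();
        new Thread(load).start();
        cdl.await();           // block until both loaders have counted down
        System.out.println("both models loaded");
    }
}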

    public static AbstractSequenceClassifier<CoreLabel> getClassifier() {
        return classifier;
    }

    public static void setClassifier(AbstractSequenceClassifier<CoreLabel> classifier) {
        Datahandler.classifier = classifier;
    }

    public void updateStringCache() {
        try {
            checkIfUpdateStrings();
        } catch (CustomError ex) {
            Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    public static GrammaticalStructureFactory getGsf() {
        return gsf;
    }

    public static MaxentTagger getTagger() {
        return tagger;
    }

    private Map<Integer, String> getCache() throws SQLException, IOException, CustomError {
        return DataMapper.getAllStrings();
    }

    public int getlHMSMXSize() {
        return lHMSMX.size();
    }

    public int getstringCacheSize() {
        return stringCache.size();
    }

    public void initiateMYSQL() throws SQLException, IOException {
        try {
            DataMapper.createTables();
            stringCache.putAll(getCache());
            // lHMSMX = DataMapper.getAllRelationScores();
        } catch (CustomError ex) {
            Logger.getLogger(Datahandler.class
                    .getName()).log(Level.SEVERE, null, ex);
        }
    }

    public void addHLstatsMessages() {
        ConcurrentMap<String, Integer> hlStatsMessages = new MapMaker().concurrencyLevel(2).makeMap();
        ConcurrentMap<Integer, String> strCacheLocal = stringCache;
        Collection<String> strs = DataMapper.getHLstatsMessages().values();
        for (String str : strs) {
            if (hlStatsMessages.get(str) == null) {
                hlStatsMessages.put(str, hlStatsMessages.size());
            }
        }
        int capacity = 150;
        hlStatsMessages.keySet().forEach(str -> {
            if (!str.startsWith("!") && MessageResponseHandler.getStr().values().size() < capacity) {
                String orElse = strCacheLocal.values().parallelStream().filter(e -> e.equals(str)).findAny().orElse(null);
                if (orElse == null) {
                    MessageResponseHandler.getMessage(str);
                }
            }
        });
    }

    public void instantiateAnnotationMapJMWE() {
        if (!stringCache.isEmpty()) {
            ConcurrentMap<String, Annotation> jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(stringCache.values());
            for (Entry<String, Annotation> entries : jmweAnnotation.entrySet()) {
                jmweAnnotationCache.put(entries.getKey(), entries.getValue());
            }
        }
    }

    public void instantiateAnnotationMap() {
        if (!stringCache.isEmpty()) {
            ConcurrentMap<String, Annotation> Annotationspipeline = new MapMaker().concurrencyLevel(2).makeMap();
            ConcurrentMap<String, Annotation> AnnotationspipelineSentiment = new MapMaker().concurrencyLevel(2).makeMap();
            stringCache.values().parallelStream().forEach(str -> {
                Annotation strAnno = new Annotation(str);
                strAnno.compact();
                Annotationspipeline.put(str, strAnno);
                Annotation strAnno2 = new Annotation(str);
                strAnno2.compact();
                AnnotationspipelineSentiment.put(str, strAnno2);
            });
            ConcurrentMap<String, CoreDocument> coreDocumentpipelineMap = getMultipleCoreDocumentsWaySuggestion(stringCache.values(), pipeline);
            pipeline.annotate(Annotationspipeline.values());
            pipelineSentiment.annotate(AnnotationspipelineSentiment.values());
            Annotationspipeline.entrySet().forEach(pipelineEntry -> {
                //relatively experimental change
                pipelineEntry.getValue().compact();
                pipelineAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue());
            });
            AnnotationspipelineSentiment.entrySet().forEach(pipelineEntry -> {
                pipelineEntry.getValue().compact();
                pipelineSentimentAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue());
            });
            coreDocumentpipelineMap.entrySet().stream().forEach(CD -> {
                coreDocumentAnnotationCache.put(CD.getKey(), CD.getValue());
            });
        }
    }

    private ConcurrentMap<Integer, String> futuresReturnOverallEvaluation(List<SimilarityMatrix> similarityMatrixes) {
        ConcurrentMap<Integer, String> strmapreturn = new MapMaker().concurrencyLevel(6).makeMap();
        if (!similarityMatrixes.isEmpty()) {
            int iterator = 0;
            for (SimilarityMatrix SMX : similarityMatrixes) {
                final Double scoreRelationNewMsgToRecentMsg = SMX.getDistance();
                if (scoreRelationNewMsgToRecentMsg > 0.0) {
                    strmapreturn = addSMXToMapReturn(strmapreturn, SMX);
                }
                //System.out.println("scoreRelationNewMsgToRecentMsg: " + scoreRelationNewMsgToRecentMsg + "\niterator: " + iterator);
                iterator++;
            }
        }
        return strmapreturn;
    }

    private ConcurrentMap<Integer, String> addSMXToMapReturn(ConcurrentMap<Integer, String> strmapreturn, SimilarityMatrix SMX) {
        if (!strmapreturn.containsValue(SMX.getPrimaryString())) {
            strmapreturn.put(strmapreturn.size(), SMX.getPrimaryString());
            String transmittedStr = SMX.getSecondaryString();
            SentimentValueCache cacheValue1 = SMX.getCacheValue1();
            SentimentValueCache cacheValue2 = SMX.getCacheValue2();
            if (cacheValue1 != null && !sentimentCachingMap.keySet().contains(SMX.getPrimaryString())) {
                sentimentCachingMap.put(SMX.getSecondaryString(), SMX.getCacheValue1());
            }
            if (cacheValue2 != null && !sentimentCachingMap.keySet().contains(transmittedStr)) {
                sentimentCachingMap.put(transmittedStr, SMX.getCacheValue2());
            }
        }
        return strmapreturn;
    }

    private List<SimilarityMatrix> StrComparringNoSentenceRelationMap(
            ConcurrentMap<Integer, String> strCacheLocal, Collection<String> strCollection, ConcurrentMap<String, Annotation> localJMWEMap,
            ConcurrentMap<String, Annotation> localPipelineAnnotation, ConcurrentMap<String, Annotation> localPipelineSentimentAnnotation,
            ConcurrentMap<String, CoreDocument> localCoreDocumentMap) {
        ExecutorService threadPool = Executors.newCachedThreadPool();
        CompletionService<SimilarityMatrix> ecs = new ExecutorCompletionService<>(threadPool);
        int index = 0;
        int prefix_size = 150;
        SentimentValueCache sentimentCacheStr = sentimentCachingMap.getOrDefault(strCollection, null);
        List<SimilarityMatrix> smxReturnList = new ArrayList();
        if (strCacheLocal.size() < prefix_size)
        {
            for (String colStr : strCollection)
            {
                strCacheLocal.put(strCacheLocal.size(), colStr);
            }
        }

        for (String str1 : strCollection) {
            for (String str : strCollection) {
                if (!str.equals(str1)) {
                    SimilarityMatrix SMXInit = new SimilarityMatrix(str, str1);
                    SentimentValueCache sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null);
                    Callable<SimilarityMatrix> worker;
                    if (stringCache.size() < prefix_size) {
                        worker = new SentimentAnalyzerTest(str, str1, SMXInit,
                                localJMWEMap.get(str), localJMWEMap.get(str1), localPipelineAnnotation.get(str),
                                localPipelineAnnotation.get(str1), localPipelineSentimentAnnotation.get(str),
                                localPipelineSentimentAnnotation.get(str1), localCoreDocumentMap.get(str), localCoreDocumentMap.get(str1), sentimentCacheStr, sentimentCacheStr1);
                    } else {
                        worker = new SentimentAnalyzerTest(str, str1, SMXInit,
                                localJMWEMap.get(str), jmweAnnotationCache.get(str1), localPipelineAnnotation.get(str),
                                pipelineAnnotationCache.get(str1), localPipelineSentimentAnnotation.get(str),
                                pipelineSentimentAnnotationCache.get(str1), localCoreDocumentMap.get(str), coreDocumentAnnotationCache.get(str1), sentimentCacheStr, sentimentCacheStr1);
                    }
                    ecs.submit(worker);
                    index++;
                    if (index % 1000 == 0 && index > 0) {
                        for (int i = 0; i < index; i++) {
                            try {
                                Future<SimilarityMatrix> take = ecs.take();
                                SimilarityMatrix smx = take.get();
                                if (smx != null) {
                                    smxReturnList.add(smx);
                                }
                            } catch (InterruptedException | ExecutionException ex) {
                                //
                            }
                        }
                        index = 0;
                        //System.out.println("smxReturnList size iterating ECS.take(): " + smxReturnList.size());
                    }
                }
            }
        }
        double distance_requirement = 15500.0;
        for (int i = 0; i < index; i++) {
            try {
                Future<SimilarityMatrix> take = ecs.take();
                SimilarityMatrix smx = take.get();
                if (smx != null && smx.getDistance() > distance_requirement) {
                    smxReturnList.add(smx);
                }
            } catch (InterruptedException | ExecutionException ex) {
                //
            }
        }
        //System.out.println("smxReturnList size: " + smxReturnList.size());
        threadPool.shutdown();
        return smxReturnList;
    }
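
Note: the method above fans pairwise comparisons out as Callables and drains them through an ExecutorCompletionService, collecting after every 1000 submissions to bound memory. The submit/take skeleton, with a trivial stand-in task:

import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class CompletionServiceSketch {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newCachedThreadPool();
        CompletionService<Integer> ecs = new ExecutorCompletionService<>(pool);
        int submitted = 0;
        for (int i = 0; i < 10; i++) {
            final int n = i;
            Callable<Integer> worker = () -> n * n;  // stand-in for SentimentAnalyzerTest
            ecs.submit(worker);
            submitted++;
        }
        for (int i = 0; i < submitted; i++) {
            // take() returns futures in completion order, not submission order.
            System.out.println(ecs.take().get());
        }
        pool.shutdown();
    }
}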

    private ConcurrentMap<Integer, String> stringIteratorComparator(ConcurrentMap<Integer, String> strmap,
            ConcurrentMap<Integer, String> strCacheLocal, ConcurrentMap<String, Annotation> localJMWEMap,
            ConcurrentMap<String, Annotation> localPipelineAnnotation, ConcurrentMap<String, Annotation> localPipelineSentimentAnnotation,
            ConcurrentMap<String, CoreDocument> localCoreDocumentMap) {
        //System.out.println("strmap siuze: " + strmap.size());
        List<SimilarityMatrix> StrComparringNoSentenceRelationMap = StrComparringNoSentenceRelationMap(strCacheLocal, strmap.values(),
                localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap);
        Collections.sort(StrComparringNoSentenceRelationMap, (e1, e2) -> e1.getPrimaryString().compareTo(e2.getPrimaryString()));
        ConcurrentMap<Integer, String> strmapreturn = futuresReturnOverallEvaluation(StrComparringNoSentenceRelationMap);
        //System.out.println("strmapreturn size: " + strmapreturn.size());
        return strmapreturn;
    }

    private ConcurrentMap<Integer, String> removeNonSensicalStrings(ConcurrentMap<Integer, String> strmap) {
        final ConcurrentMap<Integer, String> strCacheLocal = stringCache;
        final ConcurrentMap<String, Annotation> localJMWEMap = getMultipleJMWEAnnotation(strmap.values());
        final ConcurrentMap<String, Annotation> localPipelineAnnotation = getMultiplePipelineAnnotation(strmap.values());
        final ConcurrentMap<String, Annotation> localPipelineSentimentAnnotation = getMultiplePipelineSentimentAnnotation(strmap.values());
        final ConcurrentMap<String, CoreDocument> localCoreDocumentMap = getMultipleCoreDocumentsWaySuggestion(strmap.values(), pipeline);
        return stringIteratorComparator(strmap, strCacheLocal, localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap);
    }

    public synchronized void checkIfUpdateStrings() throws CustomError {
        if (stopwatch.elapsed(TimeUnit.SECONDS) >= EXPIRE_TIME_IN_SECONDS || !stopwatch.isRunning()) {
            ConcurrentMap<Integer, String> str = MessageResponseHandler.getStr();
            System.out.println("str size: " + str.size());
            str = filterContent(str);
            str = removeNonSensicalStrings(str);
            //System.out.println("removeNonSensicalStrings str size POST: " + str.size() + "\n");
            str = annotationCacheUpdate(str);
            System.out.println("annotationCacheUpdate str size POST: " + str.size() + "\n");
            ConcurrentMap<Integer, String> strf = str;
            if (!stringCache.isEmpty()) {
                new Thread(() -> {
                    try {
                        DataMapper.InsertMYSQLStrings(strf);
                    } catch (CustomError ex) {
                        Logger.getLogger(Datahandler.class
                                .getName()).log(Level.SEVERE, null, ex);
                    }
                    MessageResponseHandler.setStr(new MapMaker().concurrencyLevel(6).makeMap());
                }).start();
            } else {
                try {
                    DataMapper.InsertMYSQLStrings(strf);
                } catch (CustomError ex) {
                    Logger.getLogger(Datahandler.class
                            .getName()).log(Level.SEVERE, null, ex);
                }
                MessageResponseHandler.setStr(new MapMaker().concurrencyLevel(2).makeMap());
            }
            if (!stopwatch.isRunning()) {
                stopwatch.start();
            } else {
                stopwatch.reset();
            }
        }
    }

    private String trimString(String str) {
        str = str.trim();
        if (str.startsWith("<@")) {
            str = str.substring(str.indexOf("> ") + 2);
        }
        return str;
    }

    private String getResponseFutures(String strF) {
        List<String> values_copy = new ArrayList<String>(stringCache.values());
        Collections.shuffle(values_copy);
        double preRelationUserCounters = -155000.0;
        List<String> concurrentRelations = new ArrayList();
        for (String str1 : values_copy) {
            if (!strF.equals(str1)) {
                SentimentValueCache sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null);
                Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(strF, str1, new SimilarityMatrix(strF, str1),
                        strAnnoJMWE, jmweAnnotationCache.get(str1), strAnno,
                        pipelineAnnotationCache.get(str1), strAnnoSentiment,
                        pipelineSentimentAnnotationCache.get(str1), coreDoc, coreDocumentAnnotationCache.get(str1), null, sentimentCacheStr1);
                try {
                    SimilarityMatrix getSMX = worker.call();
                    if (getSMX != null) {
                        Double scoreRelationLastUserMsg = getSMX.getDistance();
                        if (scoreRelationLastUserMsg > preRelationUserCounters) {
                            preRelationUserCounters = scoreRelationLastUserMsg;
                            concurrentRelations.add(getSMX.getSecondaryString());
                            //System.out.println("secondary: " + getSMX.getSecondaryString() + "\nDistance: " + getSMX.getDistance() + "\n");
                            //System.out.println("SUCESS concurrentRelationsMap size: " + concurrentRelations.size() + "\n");
                        }
                    }
                } catch (Exception ex) {
                    Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }

        StringBuilder SB = new StringBuilder();
        double randomLenghtPermit = strF.length() * ((Math.random() * Math.random() * Math.random()) * 5);
        Collections.reverse(concurrentRelations);
        if (concurrentRelations.isEmpty()) {
            return "failure, preventing stuckness";
        }
        String firstRelation = concurrentRelations.get(0);
        for (String secondaryRelation : concurrentRelations) {
            if (SB.toString().length() > randomLenghtPermit && !SB.toString().isEmpty()) {
                break;
            }
            boolean append = appendToString(firstRelation, secondaryRelation);
            if (append) {
                SB.append(secondaryRelation).append(" ");
            }
        }
        return SB.toString();
    }
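
Note: getResponseFutures keeps only candidates that raise the running best score, so the last candidate added is the highest-scoring one (and leads the list after the reversal). A compact sketch of that keep-improving scan, with string length standing in for SimilarityMatrix.getDistance():

import java.util.ArrayList;
import java.util.List;

public class BestCandidateSketch {
    public static void main(String[] args) {
        List<String> candidates = List.of("short", "a bit longer", "longest answer");
        double best = Double.NEGATIVE_INFINITY;
        List<String> improving = new ArrayList<>();
        for (String c : candidates) {
            double score = c.length();        // stand-in for the similarity distance
            if (score > best) {               // only keep candidates that raise the bar
                best = score;
                improving.add(c);
            }
        }
        // Mirrors getResponseFutures: the last element is the best-scoring reply.
        System.out.println(improving.get(improving.size() - 1));
    }
}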

    private boolean appendToString(String firstRelation, String secondaryRelation) {
        if (firstRelation.equals(secondaryRelation)) {
            return true;
        }
        Double scoreRelationStrF = getScoreRelationStrF(firstRelation, secondaryRelation);
        if (scoreRelationStrF > 1900) {
            return true;
        }
        return false;
    }

    public String getResponseMsg(String str) throws CustomError {
        String strF = trimString(str);
        getSingularAnnotation(strF);
        return getResponseFutures(strF);
    }

    public void getSingularAnnotation(String str) {
        strAnno = new Annotation(str);
        strAnno.compact();
        pipeline.annotate(strAnno);
        strAnnoSentiment = new Annotation(str);
        strAnnoSentiment.compact();
        pipelineSentiment.annotate(strAnnoSentiment);
        List<String> notactualList = new ArrayList();
        notactualList.add(str);
        ConcurrentMap<String, Annotation> jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(notactualList);
        strAnnoJMWE = jmweAnnotation.values().iterator().next();
        strAnnoJMWE.compact();
        CoreDocument coreDocument = new CoreDocument(str);
        pipeline.annotate(coreDocument);
        coreDoc = coreDocument;
    }

    private static ConcurrentMap<String, Annotation> getMultipleJMWEAnnotation(Collection<String> str) {
        ConcurrentMap<String, Annotation> jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(str);
        return jmweAnnotation;
    }

    private static ConcurrentMap<String, Annotation> getMultiplePipelineAnnotation(Collection<String> str) {
        ConcurrentMap<String, Annotation> pipelineAnnotationMap = new MapMaker().concurrencyLevel(2).makeMap();
        for (String str1 : str) {
            Annotation strAnno1 = new Annotation(str1);
            pipelineAnnotationMap.put(str1, strAnno1);
        }
        pipeline.annotate(pipelineAnnotationMap.values());
        return pipelineAnnotationMap;
    }

    private static ConcurrentMap<String, Annotation> getMultiplePipelineSentimentAnnotation(Collection<String> str) {
        ConcurrentMap<String, Annotation> pipelineAnnotationMap = new MapMaker().concurrencyLevel(2).makeMap();
        for (String str1 : str) {
            Annotation strAnno1 = new Annotation(str1);
            pipelineAnnotationMap.put(str1, strAnno1);
        }
        pipelineSentiment.annotate(pipelineAnnotationMap.values());
        return pipelineAnnotationMap;
    }

    private Double getScoreRelationNewMsgToRecentMsg(String str, String mostRecentMsg) {
        SimilarityMatrix SMX = new SimilarityMatrix(str, mostRecentMsg);
        SentimentValueCache cacheSentiment1 = sentimentCachingMap.getOrDefault(str, null);
        SentimentValueCache cacheSentiment2 = sentimentCachingMap.getOrDefault(mostRecentMsg, null);
        Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(str, mostRecentMsg, SMX,
                jmweAnnotationCache.get(str), jmweAnnotationCache.get(mostRecentMsg), pipelineAnnotationCache.get(str),
                pipelineAnnotationCache.get(mostRecentMsg), pipelineSentimentAnnotationCache.get(str),
                pipelineSentimentAnnotationCache.get(mostRecentMsg), coreDocumentAnnotationCache.get(str),
                coreDocumentAnnotationCache.get(mostRecentMsg), cacheSentiment1, cacheSentiment2);
        SimilarityMatrix callSMX = null;
        try {
            callSMX = worker.call();
        } catch (Exception ex) {
            Logger.getLogger(Datahandler.class
                    .getName()).log(Level.SEVERE, null, ex);
        }
        if (callSMX != null) {
            double smxDistance = callSMX.getDistance();
            return smxDistance;
        }
        return 0.0;
    }

    private Double getScoreRelationStrF(String str, String mostRecentMsg) {
        SimilarityMatrix SMX = new SimilarityMatrix(str, mostRecentMsg);
        SentimentValueCache cacheSentiment1 = sentimentCachingMap.getOrDefault(str, null);
        SentimentValueCache cacheSentiment2 = sentimentCachingMap.getOrDefault(mostRecentMsg, null);
        Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(str, mostRecentMsg, SMX,
                strAnnoJMWE, jmweAnnotationCache.get(mostRecentMsg), strAnno,
                pipelineAnnotationCache.get(mostRecentMsg), strAnnoSentiment,
                pipelineSentimentAnnotationCache.get(mostRecentMsg), coreDoc, coreDocumentAnnotationCache.get(mostRecentMsg), cacheSentiment1, cacheSentiment2);
        SimilarityMatrix callSMX = null;
        try {
            callSMX = worker.call();
        } catch (Exception ex) {
            Logger.getLogger(Datahandler.class
                    .getName()).log(Level.SEVERE, null, ex);
        }
        if (callSMX != null) {
            double smxDistance = callSMX.getDistance();
            return smxDistance;
        }
        return 0.0;
    }

    public static ConcurrentMap<Integer, String> filterContent(ConcurrentMap<Integer, String> str) {
        ConcurrentMap<Integer, String> strlistreturn = new MapMaker().concurrencyLevel(2).makeMap();
        str.values().forEach(str1 -> {
            if (!str1.isEmpty() && str1.length() > 3) {
                str1 = str1.trim();
                if (str1.contains("PM*")) {
                    str1 = str1.substring(str1.indexOf("PM*") + 3);
                }
                if (str1.contains("AM*")) {
                    str1 = str1.substring(str1.indexOf("AM*") + 3);
                }
                /*
                if (str1.contains("?") || str1.contains("°"))
                {
                    if (!str1.contains("http"))
                    {
                        str1 = str1.replace("?", " <:wlenny:514861023002624001> ");
                        str1 = str1.replace("°", " <:wlenny:514861023002624001> ");
                    }
                }
                */
                if (str1.contains("(Counter-Terrorist)")) {
                    str1 = str1.replace("(Counter-Terrorist)", " ");
                }
                if (str1.contains("(Terrorist)")) {
                    str1 = str1.replace("(Terrorist)", " ");
                }
                if (str1.contains("(Spectator)")) {
                    str1 = str1.replace("(Spectator)", " ");
                }
                if (str1.contains("*DEAD*")) {
                    str1 = str1.replace("*DEAD*", " ");
                }
                if (str1.contains("{red}")) {
                    str1 = str1.replace("{red}", " ");
                }
                if (str1.contains("{orange}")) {
                    str1 = str1.replace("{orange}", " ");
                }
                if (str1.contains("{yellow}")) {
                    str1 = str1.replace("{yellow}", " ");
                }
                if (str1.contains("{green}")) {
                    str1 = str1.replace("{green}", " ");
                }
                if (str1.contains("{lightblue}")) {
                    str1 = str1.replace("{lightblue}", " ");
                }
                if (str1.contains("{blue}")) {
                    str1 = str1.replace("{blue}", " ");
                }
                if (str1.contains("{purple}")) {
                    str1 = str1.replace("{purple}", " ");
                }
                if (str1.contains("{white}")) {
                    str1 = str1.replace("{white}", " ");
                }
                if (str1.contains("{fullblue}")) {
                    str1 = str1.replace("{fullblue}", " ");
                }
                if (str1.contains("{cyan}")) {
                    str1 = str1.replace("{cyan}", " ");
                }
                if (str1.contains("{lime}")) {
                    str1 = str1.replace("{lime}", " ");
                }
                if (str1.contains("{deeppink}")) {
                    str1 = str1.replace("{deeppink}", " ");
                }
                if (str1.contains("{slategray}")) {
                    str1 = str1.replace("{slategray}", " ");
                }
                if (str1.contains("{dodgerblue}")) {
                    str1 = str1.replace("{dodgerblue}", " ");
                }
                if (str1.contains("{black}")) {
                    str1 = str1.replace("{black}", " ");
                }
                if (str1.contains("{orangered}")) {
                    str1 = str1.replace("{orangered}", " ");
                }
                if (str1.contains("{darkorchid}")) {
                    str1 = str1.replace("{darkorchid}", " ");
                }
                if (str1.contains("{pink}")) {
                    str1 = str1.replace("{pink}", " ");
                }
                if (str1.contains("{lightyellow}")) {
                    str1 = str1.replace("{lightyellow}", " ");
                }
                if (str1.contains("{chocolate}")) {
                    str1 = str1.replace("{chocolate}", " ");
                }
                if (str1.contains("{beige}")) {
                    str1 = str1.replace("{beige}", " ");
                }
                if (str1.contains("{azure}")) {
                    str1 = str1.replace("{azure}", " ");
                }
                if (str1.contains("{yellowgreen}")) {
                    str1 = str1.replace("{yellowgreen}", " ");
                }
                str1 = str1.trim();
                if (str1.length() > 2 && (!str1.startsWith("!"))) {
                    strlistreturn.put(strlistreturn.size(), str1);
                }
            }
        });
        return strlistreturn;
    }
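
Note: filterContent strips each chat decoration with its own if/replace pair. The same cleanup can be written as one loop over a tag table; a sketch with an abbreviated tag list:

public class TagStripSketch {
    private static final String[] TAGS = {
        "(Counter-Terrorist)", "(Terrorist)", "(Spectator)", "*DEAD*",
        "{red}", "{orange}", "{yellow}", "{green}", "{blue}" // ... remaining color tags
    };

    public static String strip(String s) {
        for (String tag : TAGS) {
            s = s.replace(tag, " ");  // String.replace is a no-op when the tag is absent
        }
        return s.trim();
    }

    public static void main(String[] args) {
        System.out.println(strip("*DEAD* {red}need backup"));
    }
}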

    private ConcurrentMap<Integer, String> annotationCacheUpdate(ConcurrentMap<Integer, String> strmap) {
        ConcurrentMap<String, Annotation> jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strmap.values());
        for (Entry<String, Annotation> jmweitr : jmweAnnotation.entrySet()) {
            jmweAnnotationCache.put(jmweitr.getKey(), jmweitr.getValue());
        }
        ConcurrentMap<String, Annotation> Annotationspipeline = new MapMaker().concurrencyLevel(4).makeMap();
        ConcurrentMap<String, Annotation> AnnotationspipelineSentiment = new MapMaker().concurrencyLevel(4).makeMap();
        ConcurrentMap<String, CoreDocument> coreDocumentpipelineMap = getMultipleCoreDocumentsWaySuggestion(strmap.values(), pipeline);
        strmap.values().forEach(str -> {
            Annotation strAnno1 = new Annotation(str);
            Annotationspipeline.put(str, strAnno1);
            Annotation strAnno2 = new Annotation(str);
            AnnotationspipelineSentiment.put(str, strAnno2);
            stringCache.put(stringCache.size() + 1, str);
        });
        pipeline.annotate(Annotationspipeline.values());
        pipelineSentiment.annotate(AnnotationspipelineSentiment.values());
        Annotationspipeline.entrySet().forEach(pipelineEntry -> {
            if (pipelineEntry != null) {
                pipelineAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue());
            }
        });
        AnnotationspipelineSentiment.entrySet().forEach(pipelineEntry -> {
            if (pipelineEntry != null) {
                pipelineSentimentAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue());
            }
        });
        coreDocumentpipelineMap.entrySet().forEach(coreDocumentEntry -> {
            coreDocumentAnnotationCache.put(coreDocumentEntry.getKey(), coreDocumentEntry.getValue());
        });
        return strmap;
    }

    public int getMessageOverHead() {
        return stringCache.values().size() - (stringCache.values().size() / 10);
    }

    public void update_autismo_socket_msg() {
        try {
            // one UDP socket per game channel; both are serviced alternately in the loop below
            try (DatagramSocket serverSocket = new DatagramSocket(48477);
                 DatagramSocket serverSocket1 = new DatagramSocket(48478)) {
                byte[] receiveData = new byte[4096];
                InetAddress IPAddress = InetAddress.getByName("144.76.218.19");
                DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
                while (true) {
                    serverSocket.receive(receivePacket);
                    String sentence = new String(receivePacket.getData(), 0, receivePacket.getLength());
                    sentence = sentence.replace("clientmessage:", "");
                    String getResponseMsg = getResponseMsg(sentence);
                    byte[] sendData = getResponseMsg.getBytes("UTF-8");
                    DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, IPAddress, 48477);
                    serverSocket.send(sendPacket);

                    receivePacket = new DatagramPacket(receiveData, receiveData.length);
                    serverSocket1.receive(receivePacket);
                    sentence = new String(receivePacket.getData(), 0, receivePacket.getLength());
                    sentence = sentence.replace("clientmessage:", "");
                    getResponseMsg = getResponseMsg(sentence);
                    sendData = getResponseMsg.getBytes("UTF-8");
                    sendPacket = new DatagramPacket(sendData, sendData.length, IPAddress, 48478);
                    serverSocket1.send(sendPacket);
                }
            } catch (CustomError ex) {
                Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
            }
        } catch (SocketException ex) {
            Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
        } catch (UnsupportedEncodingException ex) {
            Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
        } catch (IOException ex) {
            Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    private static class AnnotationCollector<T> implements Consumer<T> {

        // counts how many annotation callbacks were requested; an instance field
        // (rather than static) so separate collectors do not share state
        private int i = 0;
        private final List<T> annotationsT = new ArrayList<>();

        @Override
        public void accept(T ann) {
            //System.out.println("adding ann: " + ann.toString());
            annotationsT.add(ann);
        }
    }

    public static ConcurrentMap<String, CoreDocument> getMultipleCoreDocumentsWaySuggestion(Collection<String> str, StanfordCoreNLP localNLP) {
        AnnotationCollector<Annotation> annCollector = new AnnotationCollector<>();
        for (String exampleString : str) {
            localNLP.annotate(new Annotation(exampleString), annCollector);
            annCollector.i++;
            //System.out.println("iterator: " + annCollector.i + "\nstr size: " + str.size() + "\n");
        }
        try {
            // crude wait for the asynchronous annotate() callbacks to complete;
            // the collector has no completion signal of its own
            Thread.sleep(8000);
        } catch (InterruptedException ex) {
            Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
        }
        ConcurrentMap<String, CoreDocument> annotationreturnMap = new MapMaker().concurrencyLevel(6).makeMap();
        for (Annotation ann : annCollector.annotationsT) {
            if (ann != null) {
                ann.compact();
                CoreDocument CD = new CoreDocument(ann);
                annotationreturnMap.put(CD.text(), CD);
                //System.out.println("CD text:" + CD.text() + "\niterator: " + iterator + "\nsize: " + annCollector.annotationsT.size());
            }
        }
        return annotationreturnMap;
    }
}

ArtificialAutism/src/main/java/FunctionLayer/Datahandler.kt (new file, 660 lines)
@ -0,0 +1,660 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package FunctionLayer

import DataLayer.DataMapper
import FunctionLayer.StanfordParser.SentimentAnalyzerTest
import com.google.common.base.Stopwatch
import edu.mit.jmwe.data.IMWE
import edu.mit.jmwe.data.IToken
import edu.stanford.nlp.ie.AbstractSequenceClassifier
import edu.stanford.nlp.ie.crf.CRFClassifier
import edu.stanford.nlp.ling.CoreAnnotations
import edu.stanford.nlp.ling.CoreLabel
import edu.stanford.nlp.ling.TaggedWord
import edu.stanford.nlp.parser.lexparser.LexicalizedParser
import edu.stanford.nlp.pipeline.Annotation
import edu.stanford.nlp.pipeline.CoreDocument
import edu.stanford.nlp.pipeline.StanfordCoreNLP
import edu.stanford.nlp.tagger.maxent.MaxentTagger
import edu.stanford.nlp.trees.*
import edu.stanford.nlp.util.CoreMap
import kotlinx.coroutines.*
import org.ejml.simple.SimpleMatrix
import java.util.*
import java.util.concurrent.TimeUnit
import java.util.regex.Pattern
import kotlin.collections.ArrayList
import kotlin.collections.HashMap


/**
 *
 * @author install1
 */
public class Datahandler {
    private val stopwatch: Stopwatch
    private val EXPIRE_TIME_IN_MINUTES = TimeUnit.MINUTES.convert(30, TimeUnit.MINUTES)
    private var pipelineAnnotationCache: HashMap<String, Annotation>
    private var pipelineSentimentAnnotationCache = HashMap<String, Annotation>()
    private var coreDocumentAnnotationCache: HashMap<String, CoreDocument>
    private var jmweAnnotationCache = HashMap<String, Annotation>()
    private var stringCache = ArrayList<String>()

    //private val nerModel = "edu/stanford/nlp/models/ner/english.all.3class.caseless.distsim.crf.ser.gz"
    private val nerModel = "edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz"
    private var tagger: MaxentTagger = MaxentTagger()
    private var gsf: GrammaticalStructureFactory
    private var classifier: AbstractSequenceClassifier<CoreLabel>

    //SentimentAnalyzer Hashmaps
    private var tokenizeCountingHashMap: HashMap<String, Int> = HashMap()
    private var taggedWordListHashMap: HashMap<String, List<List<TaggedWord>>> = HashMap()
    private var retrieveTGWListHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
    private var sentences1HashMap: HashMap<String, List<CoreMap>> = HashMap()
    private var sentencesSentimentHashMap: HashMap<String, List<CoreMap>> = HashMap()
    private var trees1HashMap: HashMap<String, java.util.ArrayList<Tree>> = HashMap()
    private var grammaticalStructureHashMap: HashMap<String, java.util.ArrayList<GrammaticalStructure>> = HashMap()
    private var typedDependenciesHashMap: HashMap<String, java.util.ArrayList<TypedDependency>> = HashMap()
    private var rnnCoreAnnotationsPredictedHashMap: HashMap<String, java.util.ArrayList<Int>> = HashMap()
    private var simpleMatricesHashMap: HashMap<String, java.util.ArrayList<SimpleMatrix>> = HashMap()
    private var simpleMatricesNodevectorsHashMap: HashMap<String, java.util.ArrayList<SimpleMatrix>> = HashMap()
    private var listHashMap: HashMap<String, MutableList<Any?>> = HashMap()
    private var longestHashMap: HashMap<String, Int> = HashMap()
    private var sentimentHashMap: HashMap<String, Int> = HashMap()
    private var imwesHashMap: HashMap<String, List<IMWE<IToken>>> = HashMap()
    private var InflectedCounterNegativeHashMap: HashMap<String, Int> = HashMap()
    private var InflectedCounterPositiveHashMap: HashMap<String, Int> = HashMap()
    private var tokenEntryHashMap: HashMap<String, ArrayList<String>> = HashMap()
    private var MarkedContinuousCounterHashMap: HashMap<String, Int> = HashMap()
    private var UnmarkedPatternCounterHashMap: HashMap<String, Int> = HashMap()
    private var strTokensIpartFormHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
    private var tokenFormsHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
    private var strTokenEntryGetPOSHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
    private var intTokenEntyCountsHashMap: HashMap<String, java.util.ArrayList<Int>> = HashMap()
    private var ITokenTagsHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
    private var strTokenStemsHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
    private var AnotatorcounterHashMap: HashMap<String, Int> = HashMap()
    private var TokensCounterHashMap: HashMap<String, Int> = HashMap()
    private var entityTokenTagsHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
    private var nerEntitiesHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
    private var nerEntitiesTypeHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
    private var stopWordTokenHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
    private var stopWordLemmaHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
    private var PairCounterHashMap: HashMap<String, Int> = HashMap()

    constructor() {
        stopwatch = Stopwatch.createUnstarted()
        jmweAnnotationCache = HashMap<String, Annotation>()
        pipelineAnnotationCache = HashMap<String, Annotation>()
        pipelineSentimentAnnotationCache = HashMap<String, Annotation>()
        coreDocumentAnnotationCache = HashMap<String, CoreDocument>()
        gsf = initiateGrammaticalStructureFactory()
        classifier = CRFClassifier.getClassifierNoExceptions(nerModel)
    }

    fun initiateGrammaticalStructureFactory(): GrammaticalStructureFactory {
        val options = arrayOf("-maxLength", "100")
        //val lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz"
        val lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz"
        val lp = LexicalizedParser.loadModel(lexParserEnglishPCFG, *options)
        val tlp = lp.getOp().langpack()
        return tlp.grammaticalStructureFactory()
    }

    public fun pipeLineSetUp(): StanfordCoreNLP {
        val props = Properties()
        val shiftReduceParserPath = "edu/stanford/nlp/models/srparser/englishSR.ser.gz"
        //val nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.caseless.distsim.crf.ser.gz"
        val nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.distsim.crf.ser.gz"
        //val nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.caseless.distsim.crf.ser.gz"
        val nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.distsim.crf.ser.gz"
        props.setProperty("annotators", "tokenize,ssplit,pos,lemma,ner,parse")
        props.setProperty("parse.model", shiftReduceParserPath)
        props.setProperty("parse.maxlen", "90")
        props.setProperty("parse.binaryTrees", "true")
        props.setProperty("threads", "5")
        props.setProperty("pos.maxlen", "90")
        props.setProperty("tokenize.maxlen", "90")
        props.setProperty("ssplit.maxlen", "90")
        props.setProperty("lemma.maxlen", "90")
        props.setProperty("ner.model", "$nerModel,$nerModel2,$nerModel3")
        props.setProperty("ner.combinationMode", "HIGH_RECALL")
        props.setProperty("regexner.ignorecase", "true")
        props.setProperty("ner.fine.regexner.ignorecase", "true")
        props.setProperty("tokenize.options", "untokenizable=firstKeep")
        return StanfordCoreNLP(props)
    }

    fun shiftReduceParserInitiate(): StanfordCoreNLP {
        val propsSentiment = Properties()
        //val lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz"
        val lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz"
        val sentimentModel = "edu/stanford/nlp/models/sentiment/sentiment.ser.gz"
        //val taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger"
        val taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words-distsim.tagger"
        val customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for,if,in,into,is,it,no,not,of," +
                "on,or,such,that,the,their,then,there,these,they,this,to,was,will,with"
        propsSentiment.setProperty("parse.model", lexParserEnglishPCFG)
        propsSentiment.setProperty("sentiment.model", sentimentModel)
        propsSentiment.setProperty("parse.maxlen", "90")
        propsSentiment.setProperty("threads", "5")
        propsSentiment.setProperty("pos.maxlen", "90")
        propsSentiment.setProperty("tokenize.maxlen", "90")
        propsSentiment.setProperty("ssplit.maxlen", "90")
        propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,sentiment,lemma,stopword") //coref too expensive memorywise
        propsSentiment.setProperty("customAnnotatorClass.stopword", "FunctionLayer.StopwordAnnotator")
        propsSentiment.setProperty(StopwordAnnotator.STOPWORDS_LIST, customStopWordList)
        propsSentiment.setProperty("tokenize.options", "untokenizable=firstKeep")
        tagger = MaxentTagger(taggerPath)

        println("finished shiftReduceParserInitiate\n")
        return StanfordCoreNLP(propsSentiment)
    }

    fun updateStringCache() {
        if (stopwatch.elapsed(TimeUnit.MINUTES) >= EXPIRE_TIME_IN_MINUTES || !stopwatch.isRunning) {
            if (!stopwatch.isRunning) {
                stopwatch.start()
            } else {
                stopwatch.reset()
            }
            stringCache.sortWith(Comparator.comparingInt(String::length).reversed())
            System.out.println("pre InsertMYSQLStrings")
            val arrayList = java.util.ArrayList<String>(stringCache)
            DataMapper.InsertMYSQLStrings(arrayList)
            DataMapper.checkStringsToDelete()
            stringCache = ArrayList<String>()
            initiateMYSQL()
        }
    }

    fun initiateMYSQL() {
        stringCache.addAll(DataMapper.getAllStrings())
    }

    private fun trimString(str: String): String {
        var message = str.trim { it <= ' ' }
        if (message.startsWith("<@")) {
            message = message.substring(message.indexOf("> ") + 2)
        }
        if (!message.isEmpty()) {
            message = message.replace("@", "")
            if (message.contains("<>")) {
                message = message.substring(message.indexOf(">"))
            }
            if (message.startsWith("[ *")) {
                message = message.substring(message.indexOf("]"))
            }
        }
        return message
    }

    private fun createStrAnnotation(str: String, stanfordCoreNLP: StanfordCoreNLP, sentimentBool: Boolean) {
        val strAnno2 = Annotation(str)
        strAnno2.compact()
        stanfordCoreNLP.annotate(strAnno2)
        if (sentimentBool) {
            pipelineSentimentAnnotationCache.put(str, strAnno2)
        } else {
            pipelineAnnotationCache.put(str, strAnno2)
        }
    }

    private fun getResponseFutures(strF: String, stanfordCoreNLP: StanfordCoreNLP, stanfordCoreNLPSentiment: StanfordCoreNLP): String {
        val strAnno: Annotation = Annotation(strF)
        strAnno.compact()
        stanfordCoreNLP.annotate(strAnno)

        val strAnnoSentiment: Annotation = Annotation(strF)
        strAnnoSentiment.compact()
        stanfordCoreNLPSentiment.annotate(strAnnoSentiment)

        val annotation = Annotation(strF)
        stanfordCoreNLP.annotate(annotation)
        val coreDocument = CoreDocument(annotation)

        val values_copy: List<String> = ArrayList(stringCache)
        var preRelationUserCounters = -155000.0
        val concurrentRelations: MutableList<String> = arrayListOf()
        val SB = StringBuilder()
        var jmweAnnotationF = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strF)
        var tokenizeCountingF: Int? = null
        var taggedWordListF: List<List<TaggedWord>>? = null
        var retrieveTGWListF: java.util.ArrayList<String>? = null
        var sentencesF: List<CoreMap>? = null
        var sentencesSentimentF: List<CoreMap>? = null
        var coreMaps1: List<CoreMap> = jmweAnnotationF.get(CoreAnnotations.SentencesAnnotation::class.java)
        var treesF: java.util.ArrayList<Tree>? = null
        var grammaticalStructuresF: ArrayList<GrammaticalStructure>? = null
        var typedDependenciesF: java.util.ArrayList<TypedDependency>? = null
        var rnnCoreAnnotationsPredictedF: java.util.ArrayList<Int>? = null
        var simpleMatricesF: java.util.ArrayList<SimpleMatrix>? = null
        var simpleMatricesNodevectorsF: java.util.ArrayList<SimpleMatrix>? = null
        var listF: MutableList<Any?>? = null
        var longestF: Int? = null
        var sentimentLongestF: Int? = null
        var imwesF: List<IMWE<IToken>>? = null
        var InflectedCounterNegativeF: Int? = null
        var InflectedCounterPositiveF: Int? = null
        var tokenEntryF: ArrayList<String>? = null
        var MarkedContinuousCounterF: Int? = null
        var UnmarkedPatternCounterF: Int? = null
        var strTokensIpartFormF: ArrayList<String>? = null
        var tokenFormsF: java.util.ArrayList<String>? = null
        var strTokenEntryGetPOSF: ArrayList<String>? = null
        var intTokenEntyCountsF: java.util.ArrayList<Int>? = null
        var ITokenTagsF: ArrayList<String>? = null
        var strTokenStemsF: java.util.ArrayList<String>? = null
        var AnotatorcounterF: Int? = null
        var TokensCounterF: Int? = null
        var entityTokenTagsF: java.util.ArrayList<String>? = null
        var nerEntitiesF: java.util.ArrayList<String>? = null
        var nerEntitiesTypeF: java.util.ArrayList<String>? = null
        var stopWordTokenF: java.util.ArrayList<String>? = null
        var stopWordLemmaF: java.util.ArrayList<String>? = null
        var PairCounterF: Int? = null
        for (str1 in values_copy) {
            if (strF != str1) {
                val annotation2 = pipelineSentimentAnnotationCache.getOrDefault(str1, null)
                val annotation4 = pipelineAnnotationCache.getOrDefault(str1, null)
                val coreDocument1 = coreDocumentAnnotationCache.getOrDefault(str1, null)
                var jmweAnnotation = jmweAnnotationCache.getOrDefault(str1, null)
                if (annotation2 == null) {
                    createStrAnnotation(str1, stanfordCoreNLPSentiment, true)
                }
                if (annotation4 == null) {
                    createStrAnnotation(str1, stanfordCoreNLP, false)
                }
                if (coreDocument1 == null) {
                    getCoreDocumentsSuggested(stanfordCoreNLP, str1)
                }
                if (jmweAnnotation == null) {
                    getJMWEAnnotation(str1)
                    jmweAnnotation = jmweAnnotationCache.get(str1)
                }
                val tokenizeCounting: Int? = tokenizeCountingHashMap.getOrDefault(str1, null)
                val taggedWordList1: List<List<TaggedWord>>? = taggedWordListHashMap.getOrDefault(str1, null)
                val retrieveTGWList1: java.util.ArrayList<String>? = retrieveTGWListHashMap.getOrDefault(str1, null)
                val sentence1: List<CoreMap>? = sentences1HashMap.getOrDefault(str1, null)
                val sentenceSentiment1: List<CoreMap>? = sentencesSentimentHashMap.getOrDefault(str1, null)
                val trees1 = trees1HashMap.getOrDefault(str1, null)
                var coreMaps2: List<CoreMap> = listOf()
                val grammaticalStructures1 = grammaticalStructureHashMap.getOrDefault(str1, null)
                if (jmweAnnotation != null) {
                    coreMaps2 = jmweAnnotation.get(CoreAnnotations.SentencesAnnotation::class.java)
                }
                val typedDependencies1 = typedDependenciesHashMap.getOrDefault(str1, null)
                val rnnCoreAnnotationsPredicted1 = rnnCoreAnnotationsPredictedHashMap.getOrDefault(str1, null)
                val simpleMatrices1 = simpleMatricesHashMap.getOrDefault(str1, null)
                val simpleMatricesNodevectors1 = simpleMatricesNodevectorsHashMap.getOrDefault(str1, null)
                val list1 = listHashMap.getOrDefault(str1, null)
                val longest1 = longestHashMap.getOrDefault(str1, null)
                val sentimentLongest1 = sentimentHashMap.getOrDefault(str1, null)
                val imwes1 = imwesHashMap.getOrDefault(str1, null)
                val InflectedCounterNegative1 = InflectedCounterNegativeHashMap.getOrDefault(str1, null)
                val InflectedCounterPositive1 = InflectedCounterPositiveHashMap.getOrDefault(str1, null)
                val tokenEntry1 = tokenEntryHashMap.getOrDefault(str1, null)
                val MarkedContinuousCounter1 = MarkedContinuousCounterHashMap.getOrDefault(str1, null)
                val UnmarkedPatternCounter1 = UnmarkedPatternCounterHashMap.getOrDefault(str1, null)
                val strTokensIpartForm1 = strTokensIpartFormHashMap.getOrDefault(str1, null)
                val tokenForms1 = tokenFormsHashMap.getOrDefault(str1, null)
                val strTokenEntryGetPOS1 = strTokenEntryGetPOSHashMap.getOrDefault(str1, null)
                val intTokenEntyCounts1 = intTokenEntyCountsHashMap.getOrDefault(str1, null)
                val ITokenTags1 = ITokenTagsHashMap.getOrDefault(str1, null)
                val strTokenStems1 = strTokenStemsHashMap.getOrDefault(str1, null)
                val Anotatorcounter1 = AnotatorcounterHashMap.getOrDefault(str1, null)
                val TokensCounter1 = TokensCounterHashMap.getOrDefault(str1, null)
                val entityTokenTags1 = entityTokenTagsHashMap.getOrDefault(str1, null)
                val nerEntities1 = nerEntitiesHashMap.getOrDefault(str1, null)
                val nerEntitiesType1 = nerEntitiesTypeHashMap.getOrDefault(str1, null)
                val stopWordToken1 = stopWordTokenHashMap.getOrDefault(str1, null)
                val stopWordLemma1 = stopWordLemmaHashMap.getOrDefault(str1, null)
                val PairCounter1 = PairCounterHashMap.getOrDefault(str1, null)

                var SMX = SentimentAnalyzerTest(strF, str1, SimilarityMatrix(strF, str1),
                        coreMaps1, coreMaps2, strAnno,
                        pipelineAnnotationCache[str1], strAnnoSentiment,
                        pipelineSentimentAnnotationCache[str1], coreDocument, coreDocumentAnnotationCache[str1],
                        tagger, gsf, classifier, tokenizeCounting, tokenizeCountingF,
                        taggedWordListF, taggedWordList1, retrieveTGWListF, retrieveTGWList1,
                        sentencesF, sentence1, sentencesSentimentF, sentenceSentiment1, treesF, trees1,
                        grammaticalStructuresF, grammaticalStructures1, typedDependenciesF,
                        typedDependencies1, rnnCoreAnnotationsPredictedF, rnnCoreAnnotationsPredicted1,
                        simpleMatricesF, simpleMatrices1, simpleMatricesNodevectorsF, simpleMatricesNodevectors1,
                        listF, list1, longestF, longest1, sentimentLongestF, sentimentLongest1, imwesF,
                        imwes1, InflectedCounterNegativeF, InflectedCounterNegative1, InflectedCounterPositiveF,
                        InflectedCounterPositive1, tokenEntryF, tokenEntry1, MarkedContinuousCounterF,
                        MarkedContinuousCounter1, UnmarkedPatternCounterF, UnmarkedPatternCounter1,
                        strTokensIpartFormF, strTokensIpartForm1, tokenFormsF, tokenForms1,
                        strTokenEntryGetPOSF, strTokenEntryGetPOS1, intTokenEntyCountsF,
                        intTokenEntyCounts1, ITokenTagsF, ITokenTags1, strTokenStemsF, strTokenStems1,
                        AnotatorcounterF, Anotatorcounter1, TokensCounterF, TokensCounter1,
                        entityTokenTagsF, entityTokenTags1, nerEntitiesF, nerEntities1, nerEntitiesTypeF,
                        nerEntitiesType1, stopWordTokenF, stopWordToken1, stopWordLemmaF, stopWordLemma1,
                        PairCounterF, PairCounter1)
                if (tokenizeCounting == null) {
                    tokenizeCountingHashMap.put(str1, SMX.getTokenizeCounting())
                }
                if (taggedWordList1 == null) {
                    taggedWordListHashMap.put(str1, SMX.getTaggedWordList1())
                }
                if (tokenizeCountingF == null) {
                    tokenizeCountingF = SMX.getTokenizeCountingF()
                }
                if (taggedWordListF == null) {
                    taggedWordListF = SMX.getTaggedWordListF()
                }
                if (retrieveTGWListF == null) {
                    retrieveTGWListF = SMX.getRetrieveTGWListF()
                }
                if (retrieveTGWList1 == null) {
                    retrieveTGWListHashMap.put(str1, SMX.getRetrieveTGWList1())
                }
                if (sentencesF == null) {
                    sentencesF = SMX.getSentencesF()
                }
                if (sentence1 == null) {
                    sentences1HashMap.put(str1, SMX.getSentences1())
                }
                if (sentencesSentimentF == null) {
                    sentencesSentimentF = SMX.getSentencesSentimentF()
                }
                if (sentenceSentiment1 == null) {
                    sentencesSentimentHashMap.put(str1, SMX.getSentencesSentiment1())
                }
                if (treesF == null) {
                    treesF = SMX.getTreesF()
                }
                if (trees1 == null) {
                    trees1HashMap.put(str1, SMX.getTrees1())
                }
                if (grammaticalStructuresF == null) {
                    grammaticalStructuresF = SMX.getGrammaticalStructuresF()
                }
                if (grammaticalStructures1 == null) {
                    grammaticalStructureHashMap.put(str1, SMX.getGrammaticalStructures1())
                }
                if (typedDependenciesF == null) {
                    typedDependenciesF = SMX.getTypedDependenciesF()
                }
                if (typedDependencies1 == null) {
                    typedDependenciesHashMap.put(str1, SMX.getTypedDependencies1())
                }
                if (rnnCoreAnnotationsPredictedF == null) {
                    rnnCoreAnnotationsPredictedF = SMX.getRnnCoreAnnotationsPredictedF()
                }
                if (rnnCoreAnnotationsPredicted1 == null) {
                    rnnCoreAnnotationsPredictedHashMap.put(str1, SMX.getRnnCoreAnnotationsPredicted1())
                }
                if (simpleMatricesF == null) {
                    simpleMatricesF = SMX.getSimpleMatricesF()
                }
                if (simpleMatrices1 == null) {
                    simpleMatricesHashMap.put(str1, SMX.getSimpleMatrices1())
                }
                if (simpleMatricesNodevectorsF == null) {
                    simpleMatricesNodevectorsF = SMX.getSimpleMatricesNodevectorsF()
                }
                if (simpleMatricesNodevectors1 == null) {
                    simpleMatricesNodevectorsHashMap.put(str1, SMX.getSimpleMatricesNodevectors1())
                }
                if (listF == null) {
                    listF = SMX.getListF()
                }
                if (list1 == null) {
                    listHashMap.put(str1, SMX.getList1())
                }
                if (longestF == null) {
                    longestF = SMX.getLongestF()
                }
                if (longest1 == null) {
                    longestHashMap.put(str1, SMX.getLongest1())
                }
                if (sentimentLongestF == null) {
                    sentimentLongestF = SMX.getSentimentLongestF()
                }
                if (sentimentLongest1 == null) {
                    sentimentHashMap.put(str1, SMX.getSentimentLongest1())
                }
                if (imwesF == null) {
                    imwesF = SMX.getImwesF()
                }
                if (imwes1 == null) {
                    imwesHashMap.put(str1, SMX.getImwes1())
                }
                if (InflectedCounterNegativeF == null) {
                    InflectedCounterNegativeF = SMX.getInflectedCounterNegativeF()
                }
                if (InflectedCounterNegative1 == null) {
                    InflectedCounterNegativeHashMap.put(str1, SMX.getInflectedCounterNegative1())
                }
                if (InflectedCounterPositiveF == null) {
                    InflectedCounterPositiveF = SMX.getInflectedCounterPositiveF()
                }
                if (InflectedCounterPositive1 == null) {
                    InflectedCounterPositiveHashMap.put(str1, SMX.getInflectedCounterPositive1())
                }
                if (tokenEntryF == null) {
                    tokenEntryF = SMX.getTokenEntryF()
                }
                if (tokenEntry1 == null) {
                    tokenEntryHashMap.put(str1, SMX.getTokenEntry1())
                }
                if (MarkedContinuousCounterF == null) {
                    MarkedContinuousCounterF = SMX.getMarkedContinuousCounterF()
                }
                if (MarkedContinuousCounter1 == null) {
                    MarkedContinuousCounterHashMap.put(str1, SMX.getMarkedContinuousCounter1())
                }
                if (UnmarkedPatternCounterF == null) {
                    UnmarkedPatternCounterF = SMX.getUnmarkedPatternCounterF()
                }
                if (UnmarkedPatternCounter1 == null) {
                    UnmarkedPatternCounterHashMap.put(str1, SMX.getUnmarkedPatternCounter1())
                }
                if (strTokensIpartFormF == null) {
                    strTokensIpartFormF = SMX.getStrTokensIpartFormF()
                }
                if (strTokensIpartForm1 == null) {
                    strTokensIpartFormHashMap.put(str1, SMX.getStrTokensIpartForm1())
                }
                if (tokenFormsF == null) {
                    tokenFormsF = SMX.getTokenFormsF()
                }
                if (tokenForms1 == null) {
                    tokenFormsHashMap.put(str1, SMX.getTokenForms1())
                }
                if (strTokenEntryGetPOSF == null) {
                    strTokenEntryGetPOSF = SMX.getStrTokenEntryGetPOSF()
                }
                if (strTokenEntryGetPOS1 == null) {
                    strTokenEntryGetPOSHashMap.put(str1, SMX.getStrTokenEntryGetPOS1())
                }
                if (intTokenEntyCountsF == null) {
                    intTokenEntyCountsF = SMX.getIntTokenEntyCountsF()
                }
                if (intTokenEntyCounts1 == null) {
                    intTokenEntyCountsHashMap.put(str1, SMX.getIntTokenEntyCounts1())
                }
                if (ITokenTagsF == null) {
                    ITokenTagsF = SMX.getITokenTagsF()
                }
                if (ITokenTags1 == null) {
                    ITokenTagsHashMap.put(str1, SMX.getITokenTags1())
                }
                if (strTokenStemsF == null) {
                    strTokenStemsF = SMX.getStrTokenStemsF()
                }
                if (strTokenStems1 == null) {
                    strTokenStemsHashMap.put(str1, SMX.getStrTokenStems1())
                }
                if (AnotatorcounterF == null) {
                    AnotatorcounterF = SMX.getAnotatorcounterF()
                }
                if (Anotatorcounter1 == null) {
                    AnotatorcounterHashMap.put(str1, SMX.getAnotatorcounter1())
                }
                if (TokensCounterF == null) {
                    TokensCounterF = SMX.getTokensCounterF()
                }
                if (TokensCounter1 == null) {
                    TokensCounterHashMap.put(str1, SMX.getTokensCounter1())
                }
                if (entityTokenTagsF == null) {
                    entityTokenTagsF = SMX.getEntityTokenTagsF()
                }
                if (entityTokenTags1 == null) {
                    entityTokenTagsHashMap.put(str1, SMX.getEntityTokenTags1())
                }
                if (nerEntitiesF == null) {
                    nerEntitiesF = SMX.getNerEntitiesF()
                }
                if (nerEntities1 == null) {
                    nerEntitiesHashMap.put(str1, SMX.getNerEntities1())
                }
                if (nerEntitiesTypeF == null) {
                    nerEntitiesTypeF = SMX.getNerEntitiesTypeF()
                }
                if (nerEntitiesType1 == null) {
                    nerEntitiesTypeHashMap.put(str1, SMX.getNerEntitiesType1())
                }
                if (stopWordTokenF == null) {
                    stopWordTokenF = SMX.getStopWordTokenF()
                }
                if (stopWordToken1 == null) {
                    stopWordTokenHashMap.put(str1, SMX.getStopWordToken1())
                }
                if (stopWordLemmaF == null) {
                    stopWordLemmaF = SMX.getStopWordLemmaF()
                }
                if (stopWordLemma1 == null) {
                    stopWordLemmaHashMap.put(str1, SMX.getStopWordLemma1())
                }
                if (PairCounterF == null) {
                    PairCounterF = SMX.getPairCounterF()
                }
                if (PairCounter1 == null) {
                    PairCounterHashMap.put(str1, SMX.getPairCounter1())
                }

                var getSMX: SimilarityMatrix = SMX.callSMX()
                val scoreRelationLastUserMsg = getSMX.distance
                if (scoreRelationLastUserMsg > preRelationUserCounters) {
                    preRelationUserCounters = scoreRelationLastUserMsg
                    concurrentRelations.add(getSMX.secondaryString)
                }
            }
        }
        val cacheRequirement = 6500
        if (preRelationUserCounters > cacheRequirement && !stringCache.contains(strF) && filterContent(strF)) {
            stringCache.add(strF)
        }
        val randomLengthPermit = strF.length * (Math.random() * Math.random() * Math.random() * (Math.random() * 10))
        Collections.reverse(concurrentRelations)
        val mysqlUpdateLastUsed: ArrayList<String> = ArrayList()
        if (!concurrentRelations.isEmpty()) {
            for (secondaryRelation in concurrentRelations) {
                if (SB.toString().length > randomLengthPermit && !SB.toString().isEmpty()) {
                    break
                }
                SB.append(secondaryRelation).append(" ")
                mysqlUpdateLastUsed.add(secondaryRelation)
            }
        }
        if (SB.toString().isEmpty()) {
            return "failure, preventing stuckness"
        }
        runBlocking {
            launch(Dispatchers.IO) {
                DataMapper.updateLastUsed(mysqlUpdateLastUsed)
                yield()
            }
        }
        return SB.toString()
    }

    private fun getJMWEAnnotation(str1: String) {
        val jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(str1)
        jmweAnnotationCache.put(str1, jmweAnnotation)
    }

    fun getResponseMsg(str: String, personName: String, stanfordCoreNLP: StanfordCoreNLP,
                       stanfordCoreNLPSentiment: StanfordCoreNLP, ingameResponse: Boolean): String {
        var responseFutures: String = ""
        runBlocking {
            val launch1 = launch(Dispatchers.Default) {
                var strF = trimString(str)
                responseFutures = getResponseFutures(strF, stanfordCoreNLP, stanfordCoreNLPSentiment)
                if (!ingameResponse) {
                    responseFutures = checkPersonPresentInSentence(personName, responseFutures, strF, stanfordCoreNLP,
                            stanfordCoreNLPSentiment)
                }
                yield()
            }
            launch1.join()
        }
        return responseFutures
    }

    private fun checkPersonPresentInSentence(personName: String, responseMsg: String, userLastMessage: String,
                                             stanfordCoreNLP: StanfordCoreNLP,
                                             stanfordCoreNLPSentiment: StanfordCoreNLP): String {
        try {
            val pipelineCoreDocument = CoreDocument(responseMsg)
            val pipelineCoreDocumentLastMsg = CoreDocument(userLastMessage)
            stanfordCoreNLP.annotate(pipelineCoreDocument)
            stanfordCoreNLPSentiment.annotate(pipelineCoreDocumentLastMsg)
            val regex = "(.*?\\d){10,}"
            for (em in pipelineCoreDocument.entityMentions()) {
                val entityType = em.entityType()
                if (entityType == "PERSON") {
                    var str = responseMsg
                    val emText = em.text()
                    val pattern = Pattern.compile(regex)
                    val matcher = pattern.matcher(personName)
                    val isMatched = matcher.matches()
                    if (emText != personName && !isMatched) {
                        for (emLastMsg in pipelineCoreDocumentLastMsg.entityMentions()) {
                            if (emText != emLastMsg.text() && !Character.isDigit(emLastMsg.text().trim { it <= ' ' }[0])) {
                                //System.out.println("emLastMsg.text(): " + emLastMsg.text());
                                str = (responseMsg.substring(0, responseMsg.indexOf(emText)) + " "
                                        + emLastMsg + " " + responseMsg.substring(responseMsg.indexOf(emText)))
                            }
                        }
                        str += " $personName"
                        return str
                    }
                }
            }
        } catch (e: Exception) {
            println("""SCUFFED JAYZ: ${e.localizedMessage}""".trimIndent())
        }
        return responseMsg
    }

    fun filterContent(str: String): Boolean {
        if (!str.isEmpty() && str.length > 3) {
            val str1Local: String = str.trim()
            if (str1Local.length > 2 && !str1Local.startsWith("!")) {
                return true
            }
        }
        return false
    }

    fun getCoreDocumentsSuggested(pipeline: StanfordCoreNLP, str: String) {
        val annotation = Annotation(str)
        pipeline.annotate(annotation)
        val coreDocument = CoreDocument(annotation)
        coreDocumentAnnotationCache.put(str, coreDocument)
    }
}

@ -9,45 +9,43 @@ import PresentationLayer.DiscordHandler;
import discord4j.core.event.domain.message.MessageCreateEvent;
import discord4j.core.object.entity.User;
import discord4j.core.object.entity.channel.TextChannel;

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;


/**
 *
 * @author install1
 */
public class DoStuff {

    public static boolean occupied = false;

    public static boolean isOccupied() {
        return occupied;
    }

    public static void doStuff(MessageCreateEvent event, String usernameBot) {
        String username = null;
    public static void doStuff(MessageCreateEvent event, String usernameBot, Datahandler datahandler,
                               StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) {
        String username = "";
        try {
            username = event.getMessage().getAuthor().get().getUsername();
        } catch (java.util.NoSuchElementException e) {
            username = null;
        }
        if (username != null && !username.equals(usernameBot)) {
            occupied = true;
            TextChannel block = event.getMessage().getChannel().cast(TextChannel.class).block();
            String name = block.getCategory().block().getName();
            name = name.toLowerCase();
            String channelName = block.getName().toLowerCase();
            boolean channelpermissionsDenied = false;
            if (channelName.contains("suggestion-box")) {
                channelpermissionsDenied = true;
            }
            switch (name) {
                case "public area": {
                    break;
                }
                case "public area":
                case "information area": {
                    break;
                }
@ -56,49 +54,34 @@ public class DoStuff {
                    break;
                }
            }
            List<User> blockLast = event.getMessage().getUserMentions().buffer().blockLast();
            String content = event.getMessage().getContent();
            if (!channelpermissionsDenied) {
                if (blockLast != null)
                {
                List<User> blockLast = event.getMessage().getUserMentions().buffer().blockLast();
                String content = event.getMessage().getContent();
                if (blockLast != null) {
                    for (User user : blockLast) {
                        content = content.replace(user.getId().asString(), "");
                    }
                }
                MessageResponseHandler.getMessage(content);
            }
            boolean mentionedBot = false;
            if (blockLast != null){
                for (User user : blockLast)
                {
                    if (user.getUsername().equals(usernameBot))
                    {
                        mentionedBot = true;
                        break;
                boolean mentionedBot = false;
                if (blockLast != null) {
                    for (User user : blockLast) {
                        if (user.getUsername().equals(usernameBot)) {
                            mentionedBot = true;
                            break;
                        }
                    }
                }
            }
            if (mentionedBot || channelName.contains("general-autism")) {
                try {
                if (mentionedBot || channelName.contains("general-autism")) {
                    String ResponseStr;
                    ResponseStr = MessageResponseHandler.selectReponseMessage(content, username);
                    ResponseStr = datahandler.getResponseMsg(content, username, stanfordCoreNLP, stanfordCoreNLPSentiment,
                            false);
                    if (!ResponseStr.isEmpty()) {
                        System.out.print("\nResponseStr3: " + ResponseStr + "\n");
                        event.getMessage().getChannel().block().createMessage(ResponseStr).block();
                    }
                } catch (CustomError ex) {
                    Logger.getLogger(DoStuff.class.getName()).log(Level.SEVERE, null, ex);
                }

            }
            new Thread(() -> {
                try {
                    Datahandler.instance.checkIfUpdateStrings();
                } catch (CustomError ex) {
                    Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);
                }
            }).start();
            occupied = false;
            datahandler.updateStringCache();
        }
    }
}
 | 
			
		||||
 | 
			
		||||
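In the new branch above, mentions are scrubbed by replacing each mentioned user's raw snowflake ID, which leaves the "<@" and ">" wrapper characters for MessageResponseHandler.getMessage to strip later. A self-contained sketch of the same cleanup in one pass (class and method names here are illustrative, not from this repo); Discord encodes user mentions in raw content as <@ID> or <@!ID>:

import java.util.List;

public class MentionStripper {

    // Removes both raw mention forms Discord embeds in message content:
    // <@123456789012345678> (plain) and <@!123456789012345678> (nickname).
    public static String stripMentions(String content, List<String> mentionedIds) {
        for (String id : mentionedIds) {
            content = content.replace("<@!" + id + ">", "");
            content = content.replace("<@" + id + ">", "");
        }
        return content.trim();
    }

    public static void main(String[] args) {
        String raw = "<@!123456789012345678> hello bot";
        System.out.println(stripMentions(raw, List.of("123456789012345678"))); // -> "hello bot"
    }
}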
@@ -1,101 +0,0 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package FunctionLayer;

import com.google.common.collect.MapMaker;
import edu.stanford.nlp.pipeline.CoreDocument;
import edu.stanford.nlp.pipeline.CoreEntityMention;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 *
 * @author install1
 */
public class MessageResponseHandler {

    private static ConcurrentMap<Integer, String> str = new MapMaker().concurrencyLevel(2).makeMap();

    public static ConcurrentMap<Integer, String> getStr() {
        return str;
    }

    public static void setStr(ConcurrentMap<Integer, String> str) {
        MessageResponseHandler.str = str;
    }

    public static void getMessage(String message) {
        if (message != null && !message.isEmpty()) {
            message = message.replace("@", "");
            if (message.contains("<>")) {
                message = message.substring(message.indexOf(">"));
            }
            if (message.startsWith("[ *")) {
                message = message.substring(message.indexOf("]"));
            }
            str.put(str.size() + 1, message);
        }
    }

    public static String selectReponseMessage(String toString, String personName) throws CustomError {
        ConcurrentMap<Integer, String> str1 = new MapMaker().concurrencyLevel(6).makeMap();
        str1.put(str1.size() + 1, toString);
        String strreturn = "";
        for (String str : str1.values()) {
            if (!str.isEmpty()) {
                strreturn = str;
            }
        }
        String getResponseMsg = Datahandler.instance.getResponseMsg(strreturn);
        getResponseMsg = checkPersonPresentInSentence(personName, getResponseMsg, strreturn);
        return getResponseMsg;
    }

    private static String checkPersonPresentInSentence(String personName, String responseMsg, String userLastMessage) {
        //check if userLastMessage contains the person as a reference
        //check if the first person is the author or the person being mentioned
        try {
            String strreturn = responseMsg;
            CoreDocument pipelineCoreDcoument = new CoreDocument(responseMsg);
            CoreDocument pipelineCoreDcoumentLastMsg = new CoreDocument(userLastMessage);
            Datahandler.getPipeline().annotate(pipelineCoreDcoument);
            Datahandler.getPipeline().annotate(pipelineCoreDcoumentLastMsg);
            String regex = "(.*?\\d){10,}";
            for (CoreEntityMention em : pipelineCoreDcoument.entityMentions()) {
                String entityType = em.entityType();
                if (entityType.equals("PERSON")) {
                    String str = strreturn;
                    String emText = em.text();
                    Pattern pattern = Pattern.compile(regex);
                    Matcher matcher = pattern.matcher(personName);
                    boolean isMatched = matcher.matches();
                    if (!emText.equals(personName) && !isMatched) {
                        for (CoreEntityMention emLastMsg : pipelineCoreDcoumentLastMsg.entityMentions()) {
                            if (!emText.equals(emLastMsg.text()) && !Character.isDigit(emLastMsg.text().trim().charAt(0))) {
                                //System.out.println("emLastMsg.text(): " + emLastMsg.text());
                                str = strreturn.substring(0, strreturn.indexOf(emText)) + " "
                                        + emLastMsg + " " + strreturn.substring(strreturn.indexOf(emText));
                            }
                        }
                        str += " " + personName;
                        return str;
                    }
                }
            }
        } catch (Exception e) {
            System.out.println("SCUFFED JAYZ: " + e.getLocalizedMessage() + "\n");
        }
        return responseMsg;
    }

    public static int getOverHead() {
        int getResponseMsgOverHead = Datahandler.instance.getMessageOverHead();
        return getResponseMsgOverHead;
    }
}
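A note on the "(.*?\\d){10,}" pattern used with Matcher.matches() in checkPersonPresentInSentence above: because matches() must consume the whole input, it only succeeds when personName contains at least ten digits and ends with one, which is true of raw Discord snowflake IDs but not of ordinary usernames. A standalone illustration (hypothetical class name):

import java.util.regex.Pattern;

public class DigitHeavyNameCheck {

    // Full-string match: at least ten digits overall, and the final character
    // consumed must be a digit, so ID-like names match and plain names do not.
    private static final Pattern TEN_PLUS_DIGITS = Pattern.compile("(.*?\\d){10,}");

    public static boolean looksLikeNumericId(String name) {
        return TEN_PLUS_DIGITS.matcher(name).matches();
    }

    public static void main(String[] args) {
        System.out.println(looksLikeNumericId("123456789012345678")); // true (snowflake-like)
        System.out.println(looksLikeNumericId("alice"));              // false
        System.out.println(looksLikeNumericId("alice123"));           // false (too few digits)
    }
}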
@@ -5,7 +5,6 @@
 */
package FunctionLayer;

import com.google.common.collect.MapMaker;
import edu.mit.jmwe.data.IMWE;
import edu.mit.jmwe.data.IToken;
import edu.mit.jmwe.data.Token;
@@ -24,38 +23,30 @@ import edu.stanford.nlp.ling.JMWEAnnotation;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.util.CoreMap;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ConcurrentMap;

/**
 *
 * @author install1
 */
//maybe not public?
public class PipelineJMWESingleton {

    //if not needed to be volatile dont make it, increases time
    public volatile static PipelineJMWESingleton INSTANCE;
    //public volatile static PipelineJMWESingleton INSTANCE;
    public static PipelineJMWESingleton INSTANCE;
    private static StanfordCoreNLP localNLP = initializeJMWE();
    private static String underscoreSpaceReplacement;
    private static IMWEIndex index;
    private static IMWEDetector detector;

    private PipelineJMWESingleton() {
    }

    public static void getINSTANCE() {
        INSTANCE = new PipelineJMWESingleton();
    }

    public final ConcurrentMap<String, Annotation> getJMWEAnnotation(Collection<String> strvalues) {
        boolean verbose = false;
        IMWEIndex index;
        String jmweIndexData = "/home/debian/autism_bot/lib/mweindex_wordnet3.0_semcor1.6.data"; // ./lib/mweindex_wordnet3.0_semcor1.6.data
        String jmweIndexDataLocalTest = "E:/java8/Projects/mweindex_wordnet3.0_semcor1.6.data";
        String jmweIndexData = "/home/gameservers/autism_bot/lib/mweindex_wordnet3.0_semcor1.6.data"; // ./lib/mweindex_wordnet3.0_semcor1.6.data
        String jmweIndexDataLocalTest = "E:/stationær backup filer/Projects/mweindex_wordnet3.0_semcor1.6.data";
        File indexFile = new File((String) jmweIndexData);
        index = new MWEIndex(indexFile);
        String detectorName = "Exhaustive";
@@ -64,36 +55,45 @@ public class PipelineJMWESingleton {
        } catch (IOException e) {
            throw new RuntimeException("unable to open IMWEIndex index: " + e + "\n");
        }
        IMWEDetector detector = getDetector(index, detectorName);
        ConcurrentMap<String, Annotation> returnAnnotations = new MapMaker().concurrencyLevel(2).makeMap();
        strvalues.forEach(str -> {
            Annotation annoStr = new Annotation(str);
            returnAnnotations.put(str, annoStr);
        });
        localNLP.annotate(returnAnnotations.values());
        returnAnnotations.values().parallelStream().forEach(annoStr -> {
            for (CoreMap sentence : annoStr.get(CoreAnnotations.SentencesAnnotation.class)) {
                List<IMWE<IToken>> mwes = getjMWEInSentence(sentence, index, detector, verbose);
                sentence.set(JMWEAnnotation.class, mwes);
            }
        });
        detector = getDetector(index, detectorName);
        index.close();
        return returnAnnotations;
    }

    public static void getINSTANCE() {
        INSTANCE = new PipelineJMWESingleton();
    }

    public final Annotation getJMWEAnnotation(String str) {
        try {
            index.open();
        } catch (IOException e) {
            throw new RuntimeException("unable to open IMWEIndex index: " + e + "\n");
        }
        Annotation annoStr = new Annotation(str);
        localNLP.annotate(annoStr);
        Class<CoreAnnotations.SentencesAnnotation> sentencesAnnotationClass = CoreAnnotations.SentencesAnnotation.class;
        for (CoreMap sentence : annoStr.get(sentencesAnnotationClass)) {
            List<IMWE<IToken>> mwes = getjMWEInSentence(sentence, index, detector, false);
            //annoStr.set(JMWEAnnotation.class, mwes);
            sentence.set(JMWEAnnotation.class, mwes);
        }
        index.close();
        return annoStr;
    }

    public final static StanfordCoreNLP initializeJMWE() {
        Properties propsJMWE;
        propsJMWE = new Properties();
        propsJMWE.setProperty("annotators", "tokenize,ssplit,pos,lemma");
        propsJMWE.setProperty("tokenize.options", "untokenizable=firstDelete");
        propsJMWE.setProperty("threads", "25");
        propsJMWE.setProperty("tokenize.options", "untokenizable=firstKeep");
        propsJMWE.setProperty("threads", "5");
        propsJMWE.setProperty("pos.maxlen", "90");
        propsJMWE.setProperty("tokenize.maxlen", "90");
        propsJMWE.setProperty("ssplit.maxlen", "90");
        propsJMWE.setProperty("lemma.maxlen", "90");
        underscoreSpaceReplacement = "-";
        localNLP = new StanfordCoreNLP(propsJMWE);
        System.out.println("finished singleton constructor \n");
        System.out.println("finished JMWE constructor \n");
        return localNLP;
    }

@@ -124,7 +124,7 @@ public class PipelineJMWESingleton {
    }

    public List<IMWE<IToken>> getjMWEInSentence(CoreMap sentence, IMWEIndex index, IMWEDetector detector,
            boolean verbose) {
                                                boolean verbose) {
        List<IToken> tokens = getITokens(sentence.get(CoreAnnotations.TokensAnnotation.class));
        List<IMWE<IToken>> mwes = detector.detect(tokens);
        if (verbose) {
@@ -146,5 +146,4 @@ public class PipelineJMWESingleton {
        }
        return sentence;
    }

}
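The diff above removes volatile from INSTANCE but keeps the unsynchronized getINSTANCE() initializer, so two threads racing at startup could still each construct an instance. If that ever matters, the initialization-on-demand holder idiom gives lazy, thread-safe construction without locks or volatile; a minimal sketch, not code from this repo:

public final class LazySingleton {

    private LazySingleton() {
    }

    // The JVM initializes Holder (and thus INSTANCE) exactly once, on the
    // first call to getInstance(), with class loading providing the safety.
    private static final class Holder {
        private static final LazySingleton INSTANCE = new LazySingleton();
    }

    public static LazySingleton getInstance() {
        return Holder.INSTANCE;
    }
}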
@@ -5,10 +5,7 @@
 */
package FunctionLayer;

import FunctionLayer.StanfordParser.SentimentValueCache;

/**
 *
 * @author install1
 */
public class SimilarityMatrix {
@@ -16,8 +13,6 @@ public class SimilarityMatrix {
    private String PrimaryString;
    private String SecondaryString;
    private double distance;
    private SentimentValueCache cacheValue1;
    private SentimentValueCache cacheValue2;

    public final double getDistance() {
        return distance;
@@ -38,36 +33,8 @@ public class SimilarityMatrix {
        this.distance = result;
    }

    public final String getPrimaryString() {
        return PrimaryString;
    }

    public final void setPrimaryString(String PrimaryString) {
        this.PrimaryString = PrimaryString;
    }

    public final String getSecondaryString() {
        return SecondaryString;
    }

    public final void setSecondaryString(String SecondaryString) {
        this.SecondaryString = SecondaryString;
    }

    public final SentimentValueCache getCacheValue1() {
        return cacheValue1;
    }

    public final void setCacheValue1(SentimentValueCache cacheValue1) {
        this.cacheValue1 = cacheValue1;
    }

    public final SentimentValueCache getCacheValue2() {
        return cacheValue2;
    }

    public final void setCacheValue2(SentimentValueCache cacheValue2) {
        this.cacheValue2 = cacheValue2;
    }

}
File diff suppressed because it is too large

@@ -1,334 +0,0 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package FunctionLayer.StanfordParser;

import com.google.common.collect.MapMaker;
import edu.stanford.nlp.ling.TaggedWord;
import edu.stanford.nlp.trees.GrammaticalStructure;
import edu.stanford.nlp.trees.Tree;
import edu.stanford.nlp.trees.TypedDependency;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import org.ejml.simple.SimpleMatrix;

/**
 *
 * @author install1
 */
public class SentimentValueCache {

    private String sentence;
    private int counter;
    private List<List<TaggedWord>> taggedwordlist = new ArrayList();
    private final ConcurrentMap<Integer, String> tgwlistIndex = new MapMaker().concurrencyLevel(2).makeMap();
    private final ConcurrentMap<Integer, Tree> sentenceConstituencyParseList = new MapMaker().concurrencyLevel(2).makeMap();
    private final Collection<TypedDependency> allTypedDependencies = new ArrayList();
    private final ConcurrentMap<Integer, GrammaticalStructure> gsMap = new MapMaker().concurrencyLevel(2).makeMap();
    private final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist = new MapMaker().concurrencyLevel(3).makeMap();
    private final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector = new MapMaker().concurrencyLevel(3).makeMap();
    private final ConcurrentMap<Integer, Integer> rnnPredictClassMap = new MapMaker().concurrencyLevel(3).makeMap();
    private List classifyRaw;
    private int mainSentiment = 0;
    private int longest = 0;
    private int tokensCounter = 0;
    private int anotatorcounter = 0;
    private int inflectedCounterPositive = 0;
    private int inflectedCounterNegative = 0;
    private int MarkedContinuousCounter = 0;
    private int MarkedContiniousCounterEntries = 0;
    private int UnmarkedPatternCounter = 0;
    private int pairCounter = 0;
    private final ConcurrentMap<Integer, String> ITokenMapTag = new MapMaker().concurrencyLevel(2).makeMap();
    private final ConcurrentMap<Integer, String> strTokenStems = new MapMaker().concurrencyLevel(2).makeMap();
    private final ConcurrentMap<Integer, String> strTokenForm = new MapMaker().concurrencyLevel(2).makeMap();
    private final ConcurrentMap<Integer, String> strTokenGetEntry = new MapMaker().concurrencyLevel(2).makeMap();
    private final ConcurrentMap<Integer, String> strTokenGetiPart = new MapMaker().concurrencyLevel(2).makeMap();
    private final ConcurrentMap<Integer, String> strTokenEntryPOS = new MapMaker().concurrencyLevel(2).makeMap();
    private final ConcurrentMap<Integer, Integer> entryCounts = new MapMaker().concurrencyLevel(2).makeMap();
    private final ConcurrentMap<Integer, String> nerEntities1 = new MapMaker().concurrencyLevel(2).makeMap();
    private final ConcurrentMap<Integer, String> nerEntities2 = new MapMaker().concurrencyLevel(2).makeMap();
    private final ConcurrentMap<Integer, String> nerEntityTokenTags = new MapMaker().concurrencyLevel(3).makeMap();
    private final ConcurrentMap<Integer, String> stopwordTokens = new MapMaker().concurrencyLevel(2).makeMap();
    private final ConcurrentMap<Integer, String> stopWordLemma = new MapMaker().concurrencyLevel(2).makeMap();

    public int getPairCounter() {
        return pairCounter;
    }

    public void setPairCounter(int pairCounter) {
        this.pairCounter = pairCounter;
    }

    public void addStopWordLemma(String str) {
        stopWordLemma.put(stopWordLemma.size(), str);
    }

    public void addstopwordTokens(String str) {
        stopwordTokens.put(stopwordTokens.size(), str);
    }

    public ConcurrentMap<Integer, String> getStopwordTokens() {
        return stopwordTokens;
    }

    public ConcurrentMap<Integer, String> getStopWordLemma() {
        return stopWordLemma;
    }

    public void addnerEntityTokenTags(String str) {
        nerEntityTokenTags.put(nerEntityTokenTags.size(), str);
    }

    public ConcurrentMap<Integer, String> getnerEntityTokenTags() {
        return nerEntityTokenTags;
    }

    public ConcurrentMap<Integer, String> getnerEntities1() {
        return nerEntities1;
    }

    public ConcurrentMap<Integer, String> getnerEntities2() {
        return nerEntities2;
    }

    public void addNEREntities1(String str) {
        nerEntities1.put(nerEntities1.size(), str);
    }

    public void addNEREntities2(String str) {
        nerEntities2.put(nerEntities2.size(), str);
    }

    public void setTaggedwords(List<List<TaggedWord>> twlist) {
        taggedwordlist = twlist;
    }

    public List<List<TaggedWord>> getTaggedwordlist() {
        return taggedwordlist;
    }

    public void addEntryCounts(int counts) {
        entryCounts.put(entryCounts.size(), counts);
    }

    public ConcurrentMap<Integer, Integer> getEntryCounts() {
        return entryCounts;
    }

    public void addstrTokenEntryPOS(String str) {
        strTokenEntryPOS.put(strTokenEntryPOS.size(), str);
    }

    public ConcurrentMap<Integer, String> getstrTokenEntryPOS() {
        return strTokenEntryPOS;
    }

    public void addstrTokenGetiPart(String str) {
        strTokenGetiPart.put(strTokenGetiPart.size(), str);
    }

    public ConcurrentMap<Integer, String> getstrTokenGetiPart() {
        return strTokenGetiPart;
    }

    public ConcurrentMap<Integer, String> getstrTokenGetEntry() {
        return strTokenGetEntry;
    }

    public void addstrTokenGetEntry(String str) {
        strTokenGetEntry.put(strTokenGetEntry.size(), str);
    }

    public ConcurrentMap<Integer, String> getstrTokenForm() {
        return strTokenForm;
    }

    public void addstrTokenForm(String str) {
        strTokenForm.put(strTokenForm.size(), str);
    }

    public ConcurrentMap<Integer, String> getstrTokenStems() {
        return strTokenStems;
    }

    public void addstrTokenStems(String str) {
        strTokenStems.put(strTokenStems.size(), str);
    }

    public ConcurrentMap<Integer, String> getITokenMapTag() {
        return ITokenMapTag;
    }

    public void addITokenMapTag(String str) {
        ITokenMapTag.put(ITokenMapTag.size(), str);
    }

    public int getUnmarkedPatternCounter() {
        return UnmarkedPatternCounter;
    }

    public void setUnmarkedPatternCounter(int UnmarkedPatternCounter) {
        this.UnmarkedPatternCounter = UnmarkedPatternCounter;
    }

    public int getMarkedContiniousCounterEntries() {
        return MarkedContiniousCounterEntries;
    }

    public void setMarkedContiniousCounterEntries(int MarkedContiniousCounterEntries) {
        this.MarkedContiniousCounterEntries = MarkedContiniousCounterEntries;
    }

    public int getMarkedContinuousCounter() {
        return MarkedContinuousCounter;
    }

    public void setMarkedContinuousCounter(int MarkedContinuousCounter) {
        this.MarkedContinuousCounter = MarkedContinuousCounter;
    }

    public int getInflectedCounterNegative() {
        return inflectedCounterNegative;
    }

    public void setInflectedCounterNegative(int inflectedCounterNegative) {
        this.inflectedCounterNegative = inflectedCounterNegative;
    }

    public int getInflectedCounterPositive() {
        return inflectedCounterPositive;
    }

    public void setInflectedCounterPositive(int inflectedCounterPositive) {
        this.inflectedCounterPositive = inflectedCounterPositive;
    }

    public int getAnotatorcounter() {
        return anotatorcounter;
    }

    public void setAnotatorcounter(int anotatorcounter) {
        this.anotatorcounter = anotatorcounter;
    }

    public int getTokensCounter() {
        return tokensCounter;
    }

    public void setTokensCounter(int tokensCounter) {
        this.tokensCounter = tokensCounter;
    }

    public int getMainSentiment() {
        return mainSentiment;
    }

    public void setMainSentiment(int mainSentiment) {
        this.mainSentiment = mainSentiment;
    }

    public int getLongest() {
        return longest;
    }

    public void setLongest(int longest) {
        this.longest = longest;
    }

    public List getClassifyRaw() {
        return classifyRaw;
    }

    public void setClassifyRaw(List classifyRaw) {
        this.classifyRaw = classifyRaw;
    }

    public ConcurrentMap<Integer, Integer> getRnnPrediectClassMap() {
        return rnnPredictClassMap;
    }

    public void addRNNPredictClass(int rnnPrediction) {
        rnnPredictClassMap.put(rnnPredictClassMap.size(), rnnPrediction);
    }

    public void addSimpleMatrix(SimpleMatrix SMX) {
        simpleSMXlist.put(simpleSMXlist.size(), SMX);
    }

    public void addSimpleMatrixVector(SimpleMatrix SMX) {
        simpleSMXlistVector.put(simpleSMXlistVector.size(), SMX);
    }

    public ConcurrentMap<Integer, GrammaticalStructure> getGsMap() {
        return gsMap;
    }

    public ConcurrentMap<Integer, SimpleMatrix> getSimpleSMXlist() {
        return simpleSMXlist;
    }

    public ConcurrentMap<Integer, SimpleMatrix> getSimpleSMXlistVector() {
        return simpleSMXlistVector;
    }

    public ConcurrentMap<Integer, GrammaticalStructure> getGs() {
        return gsMap;
    }

    public int getCounter() {
        return counter;
    }

    public void addGS(GrammaticalStructure gs) {
        gsMap.put(gsMap.size(), gs);
    }

    public Collection<TypedDependency> getAllTypedDependencies() {
        return allTypedDependencies;
    }

    public void addTypedDependencies(Collection<TypedDependency> TDPlist) {
        for (TypedDependency TDP : TDPlist) {
            allTypedDependencies.add(TDP);
        }
    }

    public ConcurrentMap<Integer, Tree> getSentenceConstituencyParseList() {
        return sentenceConstituencyParseList;
    }

    public void addSentenceConstituencyParse(Tree tree) {
        sentenceConstituencyParseList.put(sentenceConstituencyParseList.size(), tree);
    }

    public void setCounter(int counter) {
        this.counter = counter;
    }

    public String getSentence() {
        return sentence;
    }

    public SentimentValueCache(String str, int counter) {
        this.sentence = str;
        this.counter = counter;
    }

    public ConcurrentMap<Integer, String> getTgwlistIndex() {
        return tgwlistIndex;
    }

    public void addTgwlistIndex(String str) {
        tgwlistIndex.put(tgwlistIndex.size(), str);
    }

    public SentimentValueCache(String str) {
        this.sentence = str;
    }
}
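Throughout the deleted cache above, appends take the form map.put(map.size(), value) on Guava MapMaker maps. Under concurrent writers that pattern races: two threads can read the same size() and overwrite each other's entry. A minimal alternative that keeps insertion order without index bookkeeping, assuming order is all the integer keys were providing (illustrative, not from the repo):

import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

public class AppendOnlyStore {

    // Lock-free FIFO; add() never collides the way size()-keyed put() can.
    private final Queue<String> values = new ConcurrentLinkedQueue<>();

    public void add(String value) {
        values.add(value);
    }

    public Iterable<String> snapshot() {
        return values;
    }
}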
3 ArtificialAutism/src/main/java/META-INF/MANIFEST.MF Normal file

@@ -0,0 +1,3 @@
Manifest-Version: 1.0
Main-Class: PresentationLayer.DiscordHandler

@@ -1,71 +1,111 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 
    ps ax | grep EventNotfierDiscordBot-1.0
    kill $pid (number)

nohup screen -d -m -S nonroot java -Xmx6048M -jar  /home/javatests/ArtificialAutism-1.0.jar
nohup screen -d -m -S nonroot java -Xmx6800M -jar  /home/javatests/ArtificialAutism-1.0.jar

screen -ls (number1)
screen -X -S (number1) quit
 */
package PresentationLayer;

import DataLayer.settings;
import FunctionLayer.Datahandler;
import FunctionLayer.DoStuff;
import FunctionLayer.PipelineJMWESingleton;
import discord4j.core.DiscordClient;
import discord4j.core.GatewayDiscordClient;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Timer;
import java.util.TimerTask;
import java.util.logging.Level;
import java.util.logging.Logger;
import DataLayer.settings;
import discord4j.common.util.Snowflake;
import discord4j.core.event.domain.message.MessageCreateEvent;
import java.math.BigInteger;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.*;
import java.sql.SQLException;
import java.util.ArrayList;


/**
 *
 * @author install1
 */
public class DiscordHandler {
    public static void main(String[] args) {
        System.setProperty("java.util.concurrent.ForkJoinPool.common.parallelism", "15");

    private static void receiveAndSendPacket(DatagramSocket serverSocket, InetAddress ipAddress, int port,
                                             Datahandler datahandler, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) throws IOException {
        byte[] receiveData = new byte[4096];
        DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
        try {
            Datahandler.instance.initiateMYSQL();
            //nohup screen -d -m -S nonroot java -Xmx6900M -jar  /home/javatests/ArtificialAutism-1.0.jar
            //uncomment db fetch when ready, just keep the comment for future reference
            System.out.println("finished initiating MYSQL");
        } catch (SQLException | IOException ex) {
            Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);
            serverSocket.receive(receivePacket);
        } catch (IOException e) {
            e.printStackTrace();
        }
        String sentence = new String(receivePacket.getData(), 0,
                receivePacket.getLength());
        sentence = sentence.replace("clientmessage:", "");
        String ResponseMsg = datahandler.getResponseMsg(sentence, "", stanfordCoreNLP, stanfordCoreNLPSentiment,
                true);
        byte[] sendData = ResponseMsg.getBytes("UTF-8");
        int deliver_port = 0;
        switch (port) {
            case 48475:
                deliver_port = 48470;
                break;
            case 48476:
                deliver_port = 48471;
                break;
            case 48477:
                deliver_port = 48472;
                break;
            case 48478:
                deliver_port = 48473;
                break;
        }
        DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, ipAddress, deliver_port);
        serverSocket.send(sendPacket);
    }

    public static void handleUDPTraffic(int port, Datahandler datahandler,
                                        StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) {

        try (DatagramSocket serverSocket = new DatagramSocket(port)) {

            String hostIP = "195.154.53.196";
            if (port == 48477 || port == 48478) {
                hostIP = "51.158.20.245";
            }
            InetAddress ipAddress = InetAddress.getByName(hostIP); // used IP
            while (true) {
                receiveAndSendPacket(serverSocket, ipAddress, port, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
            }
        } catch (SocketException | UnknownHostException e) {
            e.printStackTrace();
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) throws IOException, SQLException {
        Datahandler datahandler = new Datahandler();
        datahandler.initiateMYSQL();

        PipelineJMWESingleton.getINSTANCE();
        Datahandler.instance.instantiateAnnotationMapJMWE();
        Datahandler.instance.shiftReduceParserInitiate();
        Datahandler.instance.instantiateAnnotationMap();
        StanfordCoreNLP stanfordCoreNLP = datahandler.pipeLineSetUp();
        StanfordCoreNLP stanfordCoreNLPSentiment = datahandler.shiftReduceParserInitiate();
        System.out.println("FINISHED ALL ANNOTATIONS");
        Datahandler.instance.addHLstatsMessages();
        Datahandler.instance.updateStringCache();
        //String token = "NTI5NzAxNTk5NjAyMjc4NDAx.Dw0vDg.7-aMjVWdQMYPl8qVNyvTCPS5F_A";
        datahandler.updateStringCache();
        System.out.println("updatedstring cache");
        String token = new settings().getDiscordToken();
        final DiscordClient client = DiscordClient.create(token);
        final GatewayDiscordClient gateway = client.login().block();
        String usernameBot = gateway.getSelf().block().getUsername();
        new Thread(() -> {
            Datahandler.instance.update_autismo_socket_msg();
        }).start();
        int autismbotCount = 4;
        //make sure not to use ports that are already occupied.
        for (int i = 0; i < autismbotCount; i++) {
            final int j = i;
            new Thread(() -> {
                ArrayList<Integer> ports = new ArrayList<Integer>();
                ports.add(48475);
                ports.add(48476);
                ports.add(48477);
                ports.add(48478);
                handleUDPTraffic(ports.get(j), datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
            }).start();
        }
        gateway.on(MessageCreateEvent.class).subscribe(event -> {
            if (!FunctionLayer.DoStuff.isOccupied()) {
                FunctionLayer.DoStuff.doStuff(event, usernameBot);
            }
            FunctionLayer.DoStuff.doStuff(event, usernameBot, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
        });
        gateway.onDisconnect().block();
    }
    } //3.1.1 discord4j version
}
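In receiveAndSendPacket above, the switch maps each listening port 48475-48478 to a reply port 48470-48473 and leaves deliver_port at 0 for anything unexpected. The same mapping, table-driven and failing loudly on unknown ports (a sketch; PortMapping is not a class in this repo):

import java.util.Map;

public class PortMapping {

    // Listen-port -> deliver-port table, equivalent to the switch statement.
    private static final Map<Integer, Integer> DELIVER_PORTS = Map.of(
            48475, 48470,
            48476, 48471,
            48477, 48472,
            48478, 48473);

    public static int deliverPortFor(int listenPort) {
        Integer deliverPort = DELIVER_PORTS.get(listenPort);
        if (deliverPort == null) {
            throw new IllegalArgumentException("no deliver port mapped for " + listenPort);
        }
        return deliverPort;
    }
}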