further bot updates for chatting

christian 2021-10-25 19:08:22 +02:00
parent 509cd2cbe9
commit 4c205f49d5
13 changed files with 2688 additions and 2242 deletions

View File

@@ -5,41 +5,69 @@
*/
package DataLayer;
import org.jetbrains.annotations.NotNull;
import FunctionLayer.SimilarityMatrix;
import FunctionLayer.CustomError;
import com.google.common.collect.MapMaker;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
*
* @author install1
*/
public class DataMapper {
-public static ArrayList<String> getAllStrings() throws SQLException {
+public static void createTables() throws CustomError {
Connection l_cCon = null;
PreparedStatement l_pStatement = null;
ResultSet l_rsSearch = null;
try {
l_cCon = DBCPDataSource.getConnection();
String l_sSQL = "CREATE TABLE IF NOT EXISTS `ArtificialAutism`.`Sentences` (`Strings` text NOT NULL)";
l_pStatement = l_cCon.prepareStatement(l_sSQL);
l_pStatement.execute();
} catch (SQLException ex) {
throw new CustomError("failed in DataMapper " + ex.getMessage());
} finally {
CloseConnections(l_pStatement, l_rsSearch, l_cCon);
}
}
public static ConcurrentMap<Integer, String> getAllStrings() throws CustomError {
ConcurrentMap<Integer, String> allStrings = new MapMaker().concurrencyLevel(2).makeMap();
Connection l_cCon = null;
PreparedStatement l_pStatement = null;
ResultSet l_rsSearch = null;
-ArrayList<String> arrayListStr = new ArrayList();
try {
l_cCon = DBCPDataSource.getConnection();
String l_sSQL = "SELECT * FROM `Sentences`";
l_pStatement = l_cCon.prepareStatement(l_sSQL);
l_rsSearch = l_pStatement.executeQuery();
+int ij = 0;
while (l_rsSearch.next()) {
-arrayListStr.add(l_rsSearch.getString(1));
+allStrings.put(ij, l_rsSearch.getString(1));
+ij++;
}
} catch (SQLException ex) {
throw new CustomError("failed in DataMapper " + ex.getMessage());
} finally {
CloseConnections(l_pStatement, l_rsSearch, l_cCon);
}
-return arrayListStr;
+return allStrings;
}
-public static void InsertMYSQLStrings(ArrayList<String> str) throws SQLException {
+public static void InsertMYSQLStrings(ConcurrentMap<Integer, String> str) throws CustomError {
Connection l_cCon = null;
PreparedStatement l_pStatement = null;
ResultSet l_rsSearch = null;
@@ -47,15 +75,35 @@ public class DataMapper {
try {
l_cCon = DBCPDataSource.getConnection();
l_pStatement = l_cCon.prepareStatement(l_sSQL);
-for (String str1 : str) {
+for (String str1 : str.values()) {
//System.out.println("adding str1: " + str1 + "\n");
l_pStatement.setString(1, str1);
-l_pStatement.execute();
+l_pStatement.addBatch();
}
+l_pStatement.executeBatch();
} catch (SQLException ex) {
throw new CustomError("failed in DataMapper " + ex.getMessage());
} finally {
CloseConnections(l_pStatement, l_rsSearch, l_cCon);
}
}
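The switch above from a per-row execute() to addBatch()/executeBatch() is the core of this change: rows are queued client-side and sent in one round trip. A minimal standalone sketch of the same pattern, using try-with-resources instead of the manual CloseConnections helper (the INSERT statement text is assumed, since the hunk does not show l_sSQL, and a generic DataSource stands in for DBCPDataSource):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Collection;
import javax.sql.DataSource;

public class BatchInsertSketch {
    // Queue every row locally, then send the whole batch at once.
    static void insertSentences(DataSource ds, Collection<String> sentences) throws SQLException {
        String sql = "INSERT INTO `Sentences` (`Strings`) VALUES (?)"; // assumed statement text
        try (Connection con = ds.getConnection();
             PreparedStatement ps = con.prepareStatement(sql)) {
            for (String s : sentences) {
                ps.setString(1, s);
                ps.addBatch();
            }
            ps.executeBatch(); // one round trip instead of one per row
        } // statement and connection are closed automatically
    }
}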
public static ConcurrentMap<Integer, String> getHLstatsMessages() {
ConcurrentMap<Integer, String> hlStatsMessages = new MapMaker().concurrencyLevel(2).makeMap();
try (Connection l_cCon = DBCPDataSourceHLstats.getConnection()) {
String l_sSQL = "SELECT message FROM `hlstats_Events_Chat`";
try (PreparedStatement l_pStatement = l_cCon.prepareStatement(l_sSQL)) {
try (ResultSet l_rsSearch = l_pStatement.executeQuery()) {
while (l_rsSearch.next()) {
hlStatsMessages.put(hlStatsMessages.size() + 1, l_rsSearch.getString(1));
}
}
}
} catch (SQLException ex) {
Logger.getLogger(DataMapper.class.getName()).log(Level.SEVERE, null, ex);
}
return hlStatsMessages;
}
public static void CloseConnections(PreparedStatement ps, ResultSet rs, Connection con) {
if (rs != null) {
@@ -80,39 +128,4 @@ public class DataMapper {
}
}
}
-public static void checkStringsToDelete() {
-Connection l_cCon = null;
-PreparedStatement l_pStatement = null;
-ResultSet l_rsSearch = null;
-String l_sSQL = "delete from Sentences order by last_used asc LIMIT 15";
-try {
-l_cCon = DBCPDataSource.getConnection();
-l_pStatement = l_cCon.prepareStatement(l_sSQL);
-l_pStatement.execute();
-} catch (SQLException throwables) {
-throwables.printStackTrace();
-} finally {
-CloseConnections(l_pStatement, l_rsSearch, l_cCon);
-}
-}
-public static void updateLastUsed(@NotNull ArrayList<String> mysqlUpdateLastUsed) {
-Connection l_cCon = null;
-PreparedStatement l_pStatement = null;
-ResultSet l_rsSearch = null;
-String l_sSQL = "update Sentences Set last_used = now() where Strings = (?)";
-try {
-l_cCon = DBCPDataSource.getConnection();
-l_pStatement = l_cCon.prepareStatement(l_sSQL);
-for (String str1 : mysqlUpdateLastUsed) {
-l_pStatement.setString(1, str1);
-l_pStatement.execute();
-}
-} catch (SQLException throwables) {
-throwables.printStackTrace();
-} finally {
-CloseConnections(l_pStatement, l_rsSearch, l_cCon);
-}
-}
}

View File

@@ -0,0 +1,17 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package FunctionLayer;
/**
*
* @author install1
*/
public class CustomError extends Exception {
public CustomError(String msg) {
super(msg);
}
}

View File

@@ -0,0 +1,825 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package FunctionLayer;
import DataLayer.DataMapper;
import FunctionLayer.StanfordParser.SentimentAnalyzerTest;
import FunctionLayer.StanfordParser.SentimentValueCache;
import com.google.common.base.Stopwatch;
import com.google.common.collect.MapMaker;
import edu.stanford.nlp.ie.AbstractSequenceClassifier;
import edu.stanford.nlp.ie.crf.CRFClassifier;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.parser.lexparser.LexicalizedParser;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.CoreDocument;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.tagger.maxent.MaxentTagger;
import edu.stanford.nlp.trees.GrammaticalStructureFactory;
import edu.stanford.nlp.trees.TreebankLanguagePack;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import static java.lang.Math.random;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.sql.SQLException;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ForkJoinTask;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.Consumer;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
*
* @author install1
*/
public class Datahandler {
public static final long EXPIRE_TIME_IN_SECONDS = TimeUnit.SECONDS.convert(10, TimeUnit.MINUTES);
public static final long EXPIRE_TIME_IN_SECONDS1 = TimeUnit.SECONDS.convert(10, TimeUnit.HOURS);
public static Datahandler instance = new Datahandler();
private static Annotation strAnno;
private static Annotation strAnnoSentiment;
private static Annotation strAnnoJMWE;
private static CoreDocument coreDoc;
private static final ConcurrentMap<Integer, String> stringCache = new MapMaker().concurrencyLevel(6).makeMap();
private static ConcurrentMap<String, Annotation> pipelineAnnotationCache;
private static ConcurrentMap<String, Annotation> pipelineSentimentAnnotationCache;
private static ConcurrentMap<String, Annotation> jmweAnnotationCache;
private static ConcurrentMap<String, CoreDocument> coreDocumentAnnotationCache;
private static ConcurrentMap<String, SentimentValueCache> sentimentCachingMap = new MapMaker().concurrencyLevel(6).makeMap();
private LinkedHashMap<String, LinkedHashMap<String, Double>> lHMSMX = new LinkedHashMap();
private final Stopwatch stopwatch;
private static String similar = "";
private static String shiftReduceParserPath = "edu/stanford/nlp/models/srparser/englishSR.ser.gz";
private static String sentimentModel = "edu/stanford/nlp/models/sentiment/sentiment.ser.gz";
private static String lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz";
private static String taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger";
private static String nerModel = "edu/stanford/nlp/models/ner/english.all.3class.caseless.distsim.crf.ser.gz";
private static String nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.caseless.distsim.crf.ser.gz";
private static String nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.caseless.distsim.crf.ser.gz";
private static final String customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for,if,in,into,is,it,no,not,of,on,or,such,that,the,their,then,there,these,they,this,to,was,will,with";
private static MaxentTagger tagger;
private static String[] options = {"-maxLength", "100"};
private static Properties props = new Properties();
private static Properties propsSentiment = new Properties();
private static GrammaticalStructureFactory gsf;
private static LexicalizedParser lp;
private static TreebankLanguagePack tlp;
private static AbstractSequenceClassifier<CoreLabel> classifier;
// set up Stanford CoreNLP pipeline
private static final StanfordCoreNLP pipeline = getPipeLineSetUp();
private static StanfordCoreNLP pipelineSentiment;
public Datahandler() {
this.stopwatch = Stopwatch.createUnstarted();
this.jmweAnnotationCache = new MapMaker().concurrencyLevel(3).makeMap();
this.pipelineAnnotationCache = new MapMaker().concurrencyLevel(4).makeMap();
this.pipelineSentimentAnnotationCache = new MapMaker().concurrencyLevel(4).makeMap();
this.coreDocumentAnnotationCache = new MapMaker().concurrencyLevel(5).makeMap();
}
public static StanfordCoreNLP getPipeline() {
return pipeline;
}
private static StanfordCoreNLP getPipeLineSetUp() {
props.setProperty("annotators", "tokenize,ssplit,pos,lemma,ner,parse");
props.setProperty("parse.model", shiftReduceParserPath);
props.setProperty("parse.maxlen", "90");
props.setProperty("parse.binaryTrees", "true");
props.setProperty("threads", "8");
props.setProperty("pos.maxlen", "90");
props.setProperty("tokenize.maxlen", "90");
props.setProperty("ssplit.maxlen", "90");
props.setProperty("lemma.maxlen", "90");
props.setProperty("ner.model", nerModel + "," + nerModel2 + "," + nerModel3);
props.setProperty("ner.combinationMode", "HIGH_RECALL");
props.setProperty("regexner.ignorecase", "true");
props.setProperty("ner.fine.regexner.ignorecase", "true");
props.setProperty("tokenize.options", "untokenizable=firstDelete");
return new StanfordCoreNLP(props);
}
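For orientation, using the pipeline configured above follows the standard CoreNLP pattern: wrap the text in an Annotation, run annotate, then read the per-sentence results. A small sketch (the sample sentence is illustrative, and it assumes access to the getPipeline() accessor defined above):

import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.util.CoreMap;

public class PipelineUsageSketch {
    public static void main(String[] args) {
        StanfordCoreNLP pipeline = Datahandler.getPipeline(); // accessor from the class above
        Annotation ann = new Annotation("The bot answers chat messages.");
        pipeline.annotate(ann); // runs tokenize,ssplit,pos,lemma,ner,parse
        for (CoreMap sentence : ann.get(CoreAnnotations.SentencesAnnotation.class)) {
            System.out.println(sentence); // one CoreMap per sentence with all annotations
        }
    }
}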
public void shiftReduceParserInitiate() {
//got 8 cores
CountDownLatch cdl = new CountDownLatch(2);
new Thread(() -> {
try {
classifier = CRFClassifier.getClassifierNoExceptions(nerModel);
} catch (ClassCastException ex) {
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
}
cdl.countDown();
}).start();
new Thread(() -> {
propsSentiment.setProperty("parse.model", lexParserEnglishRNN);
propsSentiment.setProperty("sentiment.model", sentimentModel);
propsSentiment.setProperty("parse.maxlen", "90");
propsSentiment.setProperty("threads", "8");
propsSentiment.setProperty("pos.maxlen", "90");
propsSentiment.setProperty("tokenize.maxlen", "90");
propsSentiment.setProperty("ssplit.maxlen", "90");
propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,sentiment,lemma,stopword"); //coref too expensive memorywise
propsSentiment.setProperty("customAnnotatorClass.stopword", "FunctionLayer.StopwordAnnotator");
propsSentiment.setProperty(StopwordAnnotator.STOPWORDS_LIST, customStopWordList);
propsSentiment.setProperty("tokenize.options", "untokenizable=firstDelete");
pipelineSentiment = new StanfordCoreNLP(propsSentiment);
tagger = new MaxentTagger(taggerPath);
cdl.countDown();
}).start();
lp = LexicalizedParser.loadModel(lexParserEnglishRNN, options);
tlp = lp.getOp().langpack();
gsf = tlp.grammaticalStructureFactory();
try {
cdl.await();
} catch (InterruptedException ex) {
//System.out.println("cdl await interrupted: " + ex.getLocalizedMessage() + "\n");
}
System.out.println("finished shiftReduceParserInitiate\n");
}
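The CountDownLatch above lets the two model loads run on their own threads while the caller loads the lexicalized parser itself, then blocks until both workers are done. Stripped of the model specifics, the pattern is (task bodies are placeholders):

import java.util.concurrent.CountDownLatch;

public class ParallelInitSketch {
    public static void main(String[] args) throws InterruptedException {
        CountDownLatch cdl = new CountDownLatch(2);
        new Thread(() -> {
            // load model A here
            cdl.countDown();
        }).start();
        new Thread(() -> {
            // load model B here
            cdl.countDown();
        }).start();
        // the caller can do its own loading in the meantime
        cdl.await(); // returns once both workers have counted down
        System.out.println("all models loaded");
    }
}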
public static AbstractSequenceClassifier<CoreLabel> getClassifier() {
return classifier;
}
public static void setClassifier(AbstractSequenceClassifier<CoreLabel> classifier) {
Datahandler.classifier = classifier;
}
public void updateStringCache() {
try {
checkIfUpdateStrings();
} catch (CustomError ex) {
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
}
}
public static GrammaticalStructureFactory getGsf() {
return gsf;
}
public static MaxentTagger getTagger() {
return tagger;
}
private Map<Integer, String> getCache() throws SQLException, IOException, CustomError {
return DataMapper.getAllStrings();
}
public int getlHMSMXSize() {
return lHMSMX.size();
}
public int getstringCacheSize() {
return stringCache.size();
}
public void initiateMYSQL() throws SQLException, IOException {
try {
DataMapper.createTables();
stringCache.putAll(getCache());
// lHMSMX = DataMapper.getAllRelationScores();
} catch (CustomError ex) {
Logger.getLogger(Datahandler.class
.getName()).log(Level.SEVERE, null, ex);
}
}
public void addHLstatsMessages() {
ConcurrentMap<String, Integer> hlStatsMessages = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<Integer, String> strCacheLocal = stringCache;
Collection<String> strs = DataMapper.getHLstatsMessages().values();
for (String str : strs) {
if (hlStatsMessages.get(str) == null) {
hlStatsMessages.put(str, hlStatsMessages.size());
}
}
int capacity = 150;
hlStatsMessages.keySet().forEach(str -> {
if (!str.startsWith("!") && MessageResponseHandler.getStr().values().size() < capacity) {
String orElse = strCacheLocal.values().parallelStream().filter(e -> e.equals(str)).findAny().orElse(null);
if (orElse == null) {
MessageResponseHandler.getMessage(str);
}
}
});
}
public void instantiateAnnotationMapJMWE() {
if (!stringCache.isEmpty()) {
ConcurrentMap<String, Annotation> jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(stringCache.values());
for (Entry<String, Annotation> entries : jmweAnnotation.entrySet()) {
jmweAnnotationCache.put(entries.getKey(), entries.getValue());
}
}
}
public void instantiateAnnotationMap() {
if (!stringCache.isEmpty()) {
ConcurrentMap<String, Annotation> Annotationspipeline = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<String, Annotation> AnnotationspipelineSentiment = new MapMaker().concurrencyLevel(2).makeMap();
stringCache.values().parallelStream().forEach(str -> {
Annotation strAnno = new Annotation(str);
strAnno.compact();
Annotationspipeline.put(str, strAnno);
Annotation strAnno2 = new Annotation(str);
strAnno2.compact();
AnnotationspipelineSentiment.put(str, strAnno2);
});
ConcurrentMap<String, CoreDocument> coreDocumentpipelineMap = getMultipleCoreDocumentsWaySuggestion(stringCache.values(), pipeline);
pipeline.annotate(Annotationspipeline.values());
pipelineSentiment.annotate(AnnotationspipelineSentiment.values());
Annotationspipeline.entrySet().forEach(pipelineEntry -> {
//relatively experimental change
pipelineEntry.getValue().compact();
pipelineAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue());
});
AnnotationspipelineSentiment.entrySet().forEach(pipelineEntry -> {
pipelineEntry.getValue().compact();
pipelineSentimentAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue());
});
coreDocumentpipelineMap.entrySet().stream().forEach(CD -> {
coreDocumentAnnotationCache.put(CD.getKey(), CD.getValue());
});
}
}
private ConcurrentMap<Integer, String> futuresReturnOverallEvaluation(List<SimilarityMatrix> similarityMatrixes) {
ConcurrentMap<Integer, String> strmapreturn = new MapMaker().concurrencyLevel(6).makeMap();
if (!similarityMatrixes.isEmpty()) {
int iterator = 0;
for (SimilarityMatrix SMX : similarityMatrixes) {
final Double scoreRelationNewMsgToRecentMsg = SMX.getDistance();
if (scoreRelationNewMsgToRecentMsg > 0.0) {
strmapreturn = addSMXToMapReturn(strmapreturn, SMX);
}
//System.out.println("scoreRelationNewMsgToRecentMsg: " + scoreRelationNewMsgToRecentMsg + "\niterator: " + iterator);
iterator++;
}
}
return strmapreturn;
}
private ConcurrentMap<Integer, String> addSMXToMapReturn(ConcurrentMap<Integer, String> strmapreturn, SimilarityMatrix SMX) {
if (!strmapreturn.containsValue(SMX.getPrimaryString())) {
strmapreturn.put(strmapreturn.size(), SMX.getPrimaryString());
String transmittedStr = SMX.getSecondaryString();
SentimentValueCache cacheValue1 = SMX.getCacheValue1();
SentimentValueCache cacheValue2 = SMX.getCacheValue2();
if (cacheValue1 != null && !sentimentCachingMap.keySet().contains(SMX.getPrimaryString())) {
sentimentCachingMap.put(SMX.getPrimaryString(), SMX.getCacheValue1()); // key must match the containment check above
}
if (cacheValue2 != null && !sentimentCachingMap.keySet().contains(transmittedStr)) {
sentimentCachingMap.put(transmittedStr, SMX.getCacheValue2());
}
}
return strmapreturn;
}
private List<SimilarityMatrix> StrComparingNoSentenceRelationMap(
ConcurrentMap<Integer, String> strCacheLocal, Collection<String> strCollection, ConcurrentMap<String, Annotation> localJMWEMap,
ConcurrentMap<String, Annotation> localPipelineAnnotation, ConcurrentMap<String, Annotation> localPipelineSentimentAnnotation,
ConcurrentMap<String, CoreDocument> localCoreDocumentMap) {
ExecutorService threadPool = Executors.newCachedThreadPool();
CompletionService<SimilarityMatrix> ecs = new ExecutorCompletionService<>(threadPool);
int index = 0;
int prefix_size = 150;
List<SimilarityMatrix> smxReturnList = new ArrayList();
if (strCacheLocal.size() < prefix_size)
{
for (String colStr : strCollection)
{
strCacheLocal.put(strCacheLocal.size(), colStr);
}
}
for (String str1 : strCollection) {
for (String str : strCollection) {
if (!str.equals(str1)) {
SimilarityMatrix SMXInit = new SimilarityMatrix(str, str1);
// look up both per-string caches here; the old lookup keyed the whole collection and always missed
SentimentValueCache sentimentCacheStr = sentimentCachingMap.getOrDefault(str, null);
SentimentValueCache sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null);
Callable<SimilarityMatrix> worker;
if (stringCache.size() < prefix_size) {
worker = new SentimentAnalyzerTest(str, str1, SMXInit,
localJMWEMap.get(str), localJMWEMap.get(str1), localPipelineAnnotation.get(str),
localPipelineAnnotation.get(str1), localPipelineSentimentAnnotation.get(str),
localPipelineSentimentAnnotation.get(str1), localCoreDocumentMap.get(str), localCoreDocumentMap.get(str1), sentimentCacheStr, sentimentCacheStr1);
} else {
worker = new SentimentAnalyzerTest(str, str1, SMXInit,
localJMWEMap.get(str), jmweAnnotationCache.get(str1), localPipelineAnnotation.get(str),
pipelineAnnotationCache.get(str1), localPipelineSentimentAnnotation.get(str),
pipelineSentimentAnnotationCache.get(str1), localCoreDocumentMap.get(str), coreDocumentAnnotationCache.get(str1), sentimentCacheStr, sentimentCacheStr1);
}
ecs.submit(worker);
index++;
if (index % 1000 == 0 && index > 0) {
for (int i = 0; i < index; i++) {
try {
Future<SimilarityMatrix> take = ecs.take();
SimilarityMatrix smx = take.get();
if (smx != null) {
smxReturnList.add(smx);
}
} catch (InterruptedException | ExecutionException ex) {
//
}
}
index = 0;
//System.out.println("smxReturnList size iterating ECS.take(): " + smxReturnList.size());
}
}
}
}
double distance_requirement = 15500.0;
for (int i = 0; i < index; i++) {
try {
Future<SimilarityMatrix> take = ecs.take();
SimilarityMatrix smx = take.get();
if (smx != null && smx.getDistance() > distance_requirement) {
smxReturnList.add(smx);
}
} catch (InterruptedException | ExecutionException ex) {
//
}
}
//System.out.println("smxReturnList size: " + smxReturnList.size());
threadPool.shutdown();
return smxReturnList;
}
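The method above drains the ExecutorCompletionService every 1000 submissions so finished comparisons do not pile up, then collects the remainder after the loops. The underlying submit/take pattern, reduced to essentials (the Callable body is a stand-in for a SentimentAnalyzerTest worker):

import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class CompletionServiceSketch {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newCachedThreadPool();
        CompletionService<Integer> ecs = new ExecutorCompletionService<>(pool);
        int submitted = 0;
        for (int i = 0; i < 10; i++) {
            final int n = i;
            ecs.submit(() -> n * n); // stand-in for the similarity worker
            submitted++;
        }
        for (int i = 0; i < submitted; i++) {
            // take() hands back futures in completion order, not submission order
            System.out.println(ecs.take().get());
        }
        pool.shutdown();
    }
}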
private ConcurrentMap<Integer, String> stringIteratorComparator(ConcurrentMap<Integer, String> strmap,
ConcurrentMap<Integer, String> strCacheLocal, ConcurrentMap<String, Annotation> localJMWEMap,
ConcurrentMap<String, Annotation> localPipelineAnnotation, ConcurrentMap<String, Annotation> localPipelineSentimentAnnotation,
ConcurrentMap<String, CoreDocument> localCoreDocumentMap) {
//System.out.println("strmap siuze: " + strmap.size());
List<SimilarityMatrix> similarityMatrixes = StrComparingNoSentenceRelationMap(strCacheLocal, strmap.values(),
localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap);
Collections.sort(similarityMatrixes, (e1, e2) -> e1.getPrimaryString().compareTo(e2.getPrimaryString()));
ConcurrentMap<Integer, String> strmapreturn = futuresReturnOverallEvaluation(similarityMatrixes);
//System.out.println("strmapreturn size: " + strmapreturn.size());
return strmapreturn;
}
private ConcurrentMap<Integer, String> removeNonSensicalStrings(ConcurrentMap<Integer, String> strmap) {
final ConcurrentMap<Integer, String> strCacheLocal = stringCache;
final ConcurrentMap<String, Annotation> localJMWEMap = getMultipleJMWEAnnotation(strmap.values());
final ConcurrentMap<String, Annotation> localPipelineAnnotation = getMultiplePipelineAnnotation(strmap.values());
final ConcurrentMap<String, Annotation> localPipelineSentimentAnnotation = getMultiplePipelineSentimentAnnotation(strmap.values());
final ConcurrentMap<String, CoreDocument> localCoreDocumentMap = getMultipleCoreDocumentsWaySuggestion(strmap.values(), pipeline);
return stringIteratorComparator(strmap, strCacheLocal, localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap);
}
public synchronized void checkIfUpdateStrings() throws CustomError {
if (stopwatch.elapsed(TimeUnit.SECONDS) >= EXPIRE_TIME_IN_SECONDS || !stopwatch.isRunning()) {
ConcurrentMap<Integer, String> str = MessageResponseHandler.getStr();
System.out.println("str size: " + str.size());
str = filterContent(str);
str = removeNonSensicalStrings(str);
//System.out.println("removeNonSensicalStrings str size POST: " + str.size() + "\n");
str = annotationCacheUpdate(str);
System.out.println("annotationCacheUpdate str size POST: " + str.size() + "\n");
ConcurrentMap<Integer, String> strf = str;
if (!stringCache.isEmpty()) {
new Thread(() -> {
try {
DataMapper.InsertMYSQLStrings(strf);
} catch (CustomError ex) {
Logger.getLogger(Datahandler.class
.getName()).log(Level.SEVERE, null, ex);
}
MessageResponseHandler.setStr(new MapMaker().concurrencyLevel(6).makeMap());
}).start();
} else {
try {
DataMapper.InsertMYSQLStrings(strf);
} catch (CustomError ex) {
Logger.getLogger(Datahandler.class
.getName()).log(Level.SEVERE, null, ex);
}
MessageResponseHandler.setStr(new MapMaker().concurrencyLevel(2).makeMap());
}
if (!stopwatch.isRunning()) {
stopwatch.start();
} else {
stopwatch.reset();
}
}
}
private String trimString(String str) {
str = str.trim();
if (str.startsWith("<@")) {
str = str.substring(str.indexOf("> ") + 2);
}
return str;
}
private String getResponseFutures(String strF) {
List<String> values_copy = new ArrayList<String>(stringCache.values());
Collections.shuffle(values_copy);
double preRelationUserCounters = -155000.0;
List<String> concurrentRelations = new ArrayList();
for (String str1 : values_copy) {
if (!strF.equals(str1)) {
SentimentValueCache sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null);
Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(strF, str1, new SimilarityMatrix(strF, str1),
strAnnoJMWE, jmweAnnotationCache.get(str1), strAnno,
pipelineAnnotationCache.get(str1), strAnnoSentiment,
pipelineSentimentAnnotationCache.get(str1), coreDoc, coreDocumentAnnotationCache.get(str1), null, sentimentCacheStr1);
try {
SimilarityMatrix getSMX = worker.call();
if (getSMX != null) {
Double scoreRelationLastUserMsg = getSMX.getDistance();
if (scoreRelationLastUserMsg > preRelationUserCounters) {
preRelationUserCounters = scoreRelationLastUserMsg;
concurrentRelations.add(getSMX.getSecondaryString());
//System.out.println("secondary: " + getSMX.getSecondaryString() + "\nDistance: " + getSMX.getDistance() + "\n");
//System.out.println("SUCESS concurrentRelationsMap size: " + concurrentRelations.size() + "\n");
}
}
} catch (Exception ex) {
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
StringBuilder SB = new StringBuilder();
double randomLengthPermit = strF.length() * ((Math.random() * Math.random() * Math.random()) * 5);
Collections.reverse(concurrentRelations);
if (concurrentRelations.isEmpty()) {
return "failure, preventing stuckness";
}
String firstRelation = concurrentRelations.get(0);
for (String secondaryRelation : concurrentRelations) {
if (SB.toString().length() > randomLengthPermit && !SB.toString().isEmpty()) {
break;
}
boolean append = appendToString(firstRelation, secondaryRelation);
if (append) {
SB.append(secondaryRelation).append(" ");
}
}
return SB.toString();
}
private boolean appendToString(String firstRelation, String secondaryRelation) {
if (firstRelation.equals(secondaryRelation)) {
return true;
}
Double scoreRelationStrF = getScoreRelationStrF(firstRelation, secondaryRelation);
if (scoreRelationStrF > 1900) {
return true;
}
return false;
}
public String getResponseMsg(String str) throws CustomError {
String strF = trimString(str);
getSingularAnnotation(strF);
return getResponseFutures(strF);
}
public void getSingularAnnotation(String str) {
strAnno = new Annotation(str);
strAnno.compact();
pipeline.annotate(strAnno);
strAnnoSentiment = new Annotation(str);
strAnnoSentiment.compact();
pipelineSentiment.annotate(strAnnoSentiment);
List<String> notactualList = new ArrayList();
notactualList.add(str);
ConcurrentMap<String, Annotation> jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(notactualList);
strAnnoJMWE = jmweAnnotation.values().iterator().next();
strAnnoJMWE.compact();
CoreDocument coreDocument = new CoreDocument(str);
pipeline.annotate(coreDocument);
coreDoc = coreDocument;
}
private static ConcurrentMap<String, Annotation> getMultipleJMWEAnnotation(Collection<String> str) {
ConcurrentMap<String, Annotation> jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(str);
return jmweAnnotation;
}
private static ConcurrentMap<String, Annotation> getMultiplePipelineAnnotation(Collection<String> str) {
ConcurrentMap<String, Annotation> pipelineAnnotationMap = new MapMaker().concurrencyLevel(2).makeMap();
for (String str1 : str) {
Annotation strAnno1 = new Annotation(str1);
pipelineAnnotationMap.put(str1, strAnno1);
}
pipeline.annotate(pipelineAnnotationMap.values());
return pipelineAnnotationMap;
}
private static ConcurrentMap<String, Annotation> getMultiplePipelineSentimentAnnotation(Collection<String> str) {
ConcurrentMap<String, Annotation> pipelineAnnotationMap = new MapMaker().concurrencyLevel(2).makeMap();
for (String str1 : str) {
Annotation strAnno1 = new Annotation(str1);
pipelineAnnotationMap.put(str1, strAnno1);
}
pipelineSentiment.annotate(pipelineAnnotationMap.values());
return pipelineAnnotationMap;
}
private Double getScoreRelationNewMsgToRecentMsg(String str, String mostRecentMsg) {
SimilarityMatrix SMX = new SimilarityMatrix(str, mostRecentMsg);
SentimentValueCache cacheSentiment1 = sentimentCachingMap.getOrDefault(str, null);
SentimentValueCache cacheSentiment2 = sentimentCachingMap.getOrDefault(mostRecentMsg, null);
Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(str, mostRecentMsg, SMX,
jmweAnnotationCache.get(str), jmweAnnotationCache.get(mostRecentMsg), pipelineAnnotationCache.get(str),
pipelineAnnotationCache.get(mostRecentMsg), pipelineSentimentAnnotationCache.get(str),
pipelineSentimentAnnotationCache.get(mostRecentMsg), coreDocumentAnnotationCache.get(str),
coreDocumentAnnotationCache.get(mostRecentMsg), cacheSentiment1, cacheSentiment2);
SimilarityMatrix callSMX = null;
try {
callSMX = worker.call();
} catch (Exception ex) {
Logger.getLogger(Datahandler.class
.getName()).log(Level.SEVERE, null, ex);
}
if (callSMX != null) {
double smxDistance = callSMX.getDistance();
return smxDistance;
}
return 0.0;
}
private Double getScoreRelationStrF(String str, String mostRecentMsg) {
SimilarityMatrix SMX = new SimilarityMatrix(str, mostRecentMsg);
SentimentValueCache cacheSentiment1 = sentimentCachingMap.getOrDefault(str, null);
SentimentValueCache cacheSentiment2 = sentimentCachingMap.getOrDefault(mostRecentMsg, null);
Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(str, mostRecentMsg, SMX,
strAnnoJMWE, jmweAnnotationCache.get(mostRecentMsg), strAnno,
pipelineAnnotationCache.get(mostRecentMsg), strAnnoSentiment,
pipelineSentimentAnnotationCache.get(mostRecentMsg), coreDoc, coreDocumentAnnotationCache.get(mostRecentMsg), cacheSentiment1, cacheSentiment2);
SimilarityMatrix callSMX = null;
try {
callSMX = worker.call();
} catch (Exception ex) {
Logger.getLogger(Datahandler.class
.getName()).log(Level.SEVERE, null, ex);
}
if (callSMX != null) {
double smxDistance = callSMX.getDistance();
return smxDistance;
}
return 0.0;
}
public static ConcurrentMap<Integer, String> filterContent(ConcurrentMap<Integer, String> str) {
ConcurrentMap<Integer, String> strlistreturn = new MapMaker().concurrencyLevel(2).makeMap();
str.values().forEach(str1 -> {
if (!str1.isEmpty() && str1.length() > 3) {
str1 = str1.trim();
if (str1.contains("PM*")) {
str1 = str1.substring(str1.indexOf("PM*") + 3);
}
if (str1.contains("AM*")) {
str1 = str1.substring(str1.indexOf("AM*") + 3);
}
/*
if (str1.contains("?") || str1.contains("°"))
{
if (!str1.contains("http"))
{
str1 = str1.replace("?", " <:wlenny:514861023002624001> ");
str1 = str1.replace("°", " <:wlenny:514861023002624001> ");
}
}
*/
if (str1.contains("(Counter-Terrorist)")) {
str1 = str1.replace("(Counter-Terrorist)", " ");
}
if (str1.contains("(Terrorist)")) {
str1 = str1.replace("(Terrorist)", " ");
}
if (str1.contains("(Spectator)")) {
str1 = str1.replace("(Spectator)", " ");
}
if (str1.contains("*DEAD*")) {
str1 = str1.replace("*DEAD*", " ");
}
if (str1.contains("{red}")) {
str1 = str1.replace("{red}", " ");
}
if (str1.contains("{orange}")) {
str1 = str1.replace("{orange}", " ");
}
if (str1.contains("{yellow}")) {
str1 = str1.replace("{yellow}", " ");
}
if (str1.contains("{green}")) {
str1 = str1.replace("{green}", " ");
}
if (str1.contains("{lightblue}")) {
str1 = str1.replace("{lightblue}", " ");
}
if (str1.contains("{blue}")) {
str1 = str1.replace("{blue}", " ");
}
if (str1.contains("{purple}")) {
str1 = str1.replace("{purple}", " ");
}
if (str1.contains("{white}")) {
str1 = str1.replace("{white}", " ");
}
if (str1.contains("{fullblue}")) {
str1 = str1.replace("{fullblue}", " ");
}
if (str1.contains("{cyan}")) {
str1 = str1.replace("{cyan}", " ");
}
if (str1.contains("{lime}")) {
str1 = str1.replace("{lime}", " ");
}
if (str1.contains("{deeppink}")) {
str1 = str1.replace("{deeppink}", " ");
}
if (str1.contains("{slategray}")) {
str1 = str1.replace("{slategray}", " ");
}
if (str1.contains("{dodgerblue}")) {
str1 = str1.replace("{dodgerblue}", " ");
}
if (str1.contains("{black}")) {
str1 = str1.replace("{black}", " ");
}
if (str1.contains("{orangered}")) {
str1 = str1.replace("{orangered}", " ");
}
if (str1.contains("{darkorchid}")) {
str1 = str1.replace("{darkorchid}", " ");
}
if (str1.contains("{pink}")) {
str1 = str1.replace("{pink}", " ");
}
if (str1.contains("{lightyellow}")) {
str1 = str1.replace("{lightyellow}", " ");
}
if (str1.contains("{chocolate}")) {
str1 = str1.replace("{chocolate}", " ");
}
if (str1.contains("{beige}")) {
str1 = str1.replace("{beige}", " ");
}
if (str1.contains("{azure}")) {
str1 = str1.replace("{azure}", " ");
}
if (str1.contains("{yellowgreen}")) {
str1 = str1.replace("{yellowgreen}", " ");
}
str1 = str1.trim();
if (str1.length() > 2 && (!str1.startsWith("!"))) {
strlistreturn.put(strlistreturn.size(), str1);
}
}
});
return strlistreturn;
}
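filterContent strips the HLstats color tags and player-state markers one literal at a time; the same cleanup can be written data-driven with a single regex built from the tags handled above, which is easier to extend when new tags show up. A sketch:

import java.util.regex.Pattern;

public class ChatFilterSketch {
    // The {color} tags and player-state markers from the branches above.
    private static final Pattern NOISE = Pattern.compile(
            "\\{(?:red|orange|yellow|green|lightblue|blue|purple|white|fullblue|cyan"
            + "|lime|deeppink|slategray|dodgerblue|black|orangered|darkorchid|pink"
            + "|lightyellow|chocolate|beige|azure|yellowgreen)\\}"
            + "|\\(Counter-Terrorist\\)|\\(Terrorist\\)|\\(Spectator\\)|\\*DEAD\\*");

    static String clean(String msg) {
        return NOISE.matcher(msg).replaceAll(" ").trim();
    }

    public static void main(String[] args) {
        System.out.println(clean("*DEAD* {red}rush B")); // prints "rush B"
    }
}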
private ConcurrentMap<Integer, String> annotationCacheUpdate(ConcurrentMap<Integer, String> strmap) {
ConcurrentMap<String, Annotation> jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strmap.values());
for (Entry<String, Annotation> jmweitr : jmweAnnotation.entrySet()) {
jmweAnnotationCache.put(jmweitr.getKey(), jmweitr.getValue());
}
ConcurrentMap<String, Annotation> Annotationspipeline = new MapMaker().concurrencyLevel(4).makeMap();
ConcurrentMap<String, Annotation> AnnotationspipelineSentiment = new MapMaker().concurrencyLevel(4).makeMap();
ConcurrentMap<String, CoreDocument> coreDocumentpipelineMap = getMultipleCoreDocumentsWaySuggestion(strmap.values(), pipeline);
strmap.values().forEach(str -> {
Annotation strAnno1 = new Annotation(str);
Annotationspipeline.put(str, strAnno1);
Annotation strAnno2 = new Annotation(str);
AnnotationspipelineSentiment.put(str, strAnno2);
stringCache.put(stringCache.size() + 1, str);
});
pipeline.annotate(Annotationspipeline.values());
pipelineSentiment.annotate(AnnotationspipelineSentiment.values());
Annotationspipeline.entrySet().forEach(pipelineEntry -> {
if (pipelineEntry != null) {
pipelineAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue());
}
});
AnnotationspipelineSentiment.entrySet().forEach(pipelineEntry -> {
if (pipelineEntry != null) {
pipelineSentimentAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue());
}
});
coreDocumentpipelineMap.entrySet().forEach(coreDocumentEntry -> {
coreDocumentAnnotationCache.put(coreDocumentEntry.getKey(), coreDocumentEntry.getValue());
});
return strmap;
}
public int getMessageOverHead() {
return stringCache.values().size() - (stringCache.values().size() / 10);
}
public void update_autismo_socket_msg() {
try {
try (DatagramSocket serverSocket = new DatagramSocket(48477)) {
try (DatagramSocket serverSocket1 = new DatagramSocket(48478)) {
byte[] receiveData = new byte[4096];
InetAddress IPAddress = InetAddress.getByName("144.76.218.19");
DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
while (true) {
serverSocket.receive(receivePacket);
String sentence = new String(receivePacket.getData(), 0, receivePacket.getLength());
sentence = sentence.replace("clientmessage:", "");
String getResponseMsg = getResponseMsg(sentence);
byte[] sendData = getResponseMsg.getBytes("UTF-8");
DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, IPAddress, 48477);
serverSocket.send(sendPacket);
receivePacket = new DatagramPacket(receiveData, receiveData.length);
serverSocket1.receive(receivePacket);
sentence = new String(receivePacket.getData(), 0, receivePacket.getLength());
sentence = sentence.replace("clientmessage:", "");
getResponseMsg = getResponseMsg(sentence);
sendData = getResponseMsg.getBytes("UTF-8");
sendPacket = new DatagramPacket(sendData, sendData.length, IPAddress, 48478);
serverSocket1.send(sendPacket);
}
}
} catch (CustomError ex) {
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
}
} catch (SocketException ex) {
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
} catch (UnsupportedEncodingException ex) {
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
} catch (IOException ex) {
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
}
}
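The loop above is a fixed-peer request/response protocol over UDP: whatever arrives on port 48477 (or 48478) is stripped of its "clientmessage:" prefix, answered via getResponseMsg, and the reply is sent to a hard-coded peer address on the same port number rather than back to the packet's source port. A client therefore has to listen on that port itself; a minimal sketch (the peer hostname is illustrative):

import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.nio.charset.StandardCharsets;

public class BotClientSketch {
    public static void main(String[] args) throws Exception {
        // Bind 48477 locally: the bot replies to its configured peer on this
        // port, not to the ephemeral source port of the request.
        try (DatagramSocket socket = new DatagramSocket(48477)) {
            byte[] out = "clientmessage:hello bot".getBytes(StandardCharsets.UTF_8);
            InetAddress bot = InetAddress.getByName("bot.example.org"); // illustrative host
            socket.send(new DatagramPacket(out, out.length, bot, 48477));
            byte[] in = new byte[4096];
            DatagramPacket reply = new DatagramPacket(in, in.length);
            socket.receive(reply); // blocks until the generated answer arrives
            System.out.println(new String(reply.getData(), 0, reply.getLength(), StandardCharsets.UTF_8));
        }
    }
}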
private static class AnnotationCollector<T> implements Consumer<T> {
private static int i = 0;
private List<T> annotationsT = new ArrayList();
@Override
public void accept(T ann) {
//System.out.println("adding ann: " + ann.toString());
annotationsT.add(ann);
}
}
public static ConcurrentMap<String, CoreDocument> getMultipleCoreDocumentsWaySuggestion(Collection<String> str, StanfordCoreNLP localNLP) {
AnnotationCollector<Annotation> annCollector = new AnnotationCollector();
for (String exampleString : str) {
localNLP.annotate(new Annotation(exampleString), annCollector);
annCollector.i++;
//System.out.println("iterator: " + annCollector.i + "\nstr size: " + str.size() + "\n");
}
try {
Thread.sleep(8000);
} catch (InterruptedException ex) {
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
}
ConcurrentMap<String, CoreDocument> annotationreturnMap = new MapMaker().concurrencyLevel(6).makeMap();
for (Annotation ann : annCollector.annotationsT) {
if (ann != null) {
ann.compact();
CoreDocument CD = new CoreDocument(ann);
annotationreturnMap.put(CD.text(), CD);
//System.out.println("CD text:" + CD.text() + "\niterator: " + iterator + "\nsize: " + annCollector.annotationsT.size());
}
}
return annotationreturnMap;
}
}
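getMultipleCoreDocumentsWaySuggestion hands every Annotation to the asynchronous annotate(Annotation, Consumer) overload and then sleeps a flat eight seconds before reading the collector, which can both over- and under-wait. A CountDownLatch sized to the number of submitted texts waits exactly as long as needed; a sketch under the same callback overload used above:

import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.CoreDocument;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import java.util.Collection;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;

public class CoreDocumentsSketch {
    static Map<String, CoreDocument> annotateAll(Collection<String> texts, StanfordCoreNLP nlp)
            throws InterruptedException {
        CountDownLatch done = new CountDownLatch(texts.size());
        Map<String, CoreDocument> out = new ConcurrentHashMap<>();
        for (String text : texts) {
            nlp.annotate(new Annotation(text), ann -> {
                CoreDocument cd = new CoreDocument(ann);
                out.put(cd.text(), cd); // same keying as the cache above
                done.countDown();       // one count per finished annotation
            });
        }
        done.await(); // returns when the last callback fires, no fixed sleep
        return out;
    }
}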

View File

@@ -1,658 +0,0 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package FunctionLayer
import DataLayer.DataMapper
import FunctionLayer.StanfordParser.SentimentAnalyzerTest
import com.google.common.base.Stopwatch
import edu.mit.jmwe.data.IMWE
import edu.mit.jmwe.data.IToken
import edu.stanford.nlp.ie.AbstractSequenceClassifier
import edu.stanford.nlp.ie.crf.CRFClassifier
import edu.stanford.nlp.ling.CoreAnnotations
import edu.stanford.nlp.ling.CoreLabel
import edu.stanford.nlp.ling.TaggedWord
import edu.stanford.nlp.parser.lexparser.LexicalizedParser
import edu.stanford.nlp.pipeline.Annotation
import edu.stanford.nlp.pipeline.CoreDocument
import edu.stanford.nlp.pipeline.StanfordCoreNLP
import edu.stanford.nlp.tagger.maxent.MaxentTagger
import edu.stanford.nlp.trees.*
import edu.stanford.nlp.util.CoreMap
import kotlinx.coroutines.*
import org.ejml.simple.SimpleMatrix
import java.util.*
import java.util.concurrent.TimeUnit
import java.util.regex.Pattern
import kotlin.collections.ArrayList
import kotlin.collections.HashMap
/**
*
* @author install1
*/
public class Datahandler {
private val stopwatch: Stopwatch
private val EXPIRE_TIME_IN_MINUTES = TimeUnit.MINUTES.convert(30, TimeUnit.MINUTES)
private var pipelineAnnotationCache: HashMap<String, Annotation>
private var pipelineSentimentAnnotationCache = HashMap<String, Annotation>()
private var coreDocumentAnnotationCache: HashMap<String, CoreDocument>
private var jmweAnnotationCache = HashMap<String, Annotation>()
private var stringCache = ArrayList<String>()
//private val nerModel = "edu/stanford/nlp/models/ner/english.all.3class.caseless.distsim.crf.ser.gz"
private val nerModel = "edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz"
private var tagger: MaxentTagger = MaxentTagger()
private var gsf: GrammaticalStructureFactory
private var classifier: AbstractSequenceClassifier<CoreLabel>
//SentimentAnalyzer Hashmaps
private var tokenizeCountingHashMap: HashMap<String, Int> = HashMap()
private var taggedWordListHashMap: HashMap<String, List<List<TaggedWord>>> = HashMap()
private var retrieveTGWListHashMap: HashMap<String, java.util.ArrayList<String>> =
HashMap()
private var sentences1HashMap: HashMap<String, List<CoreMap>> = HashMap()
private var sentencesSentimentHashMap: HashMap<String, List<CoreMap>> = HashMap()
private var trees1HashMap: HashMap<String, java.util.ArrayList<Tree>> = HashMap()
private var grammaticalStructureHashMap: HashMap<String, java.util.ArrayList<GrammaticalStructure>> =
HashMap()
private var typedDependenciesHashMap: HashMap<String, java.util.ArrayList<TypedDependency>> =
HashMap()
private var rnnCoreAnnotationsPredictedHashMap: HashMap<String, java.util.ArrayList<Int>> = HashMap()
private var simpleMatricesHashMap: HashMap<String, java.util.ArrayList<SimpleMatrix>> = HashMap()
private var simpleMatricesNodevectorsHashMap: HashMap<String, java.util.ArrayList<SimpleMatrix>> = HashMap()
private var listHashMap: HashMap<String, MutableList<Any?>> = HashMap()
private var longestHashMap: HashMap<String, Int> = HashMap()
private var sentimentHashMap: HashMap<String, Int> = HashMap()
private var imwesHashMap: HashMap<String, List<IMWE<IToken>>> = HashMap()
private var InflectedCounterNegativeHashMap: HashMap<String, Int> = HashMap()
private var InflectedCounterPositiveHashMap: HashMap<String, Int> = HashMap()
private var tokenEntryHashMap: HashMap<String, ArrayList<String>> = HashMap()
private var MarkedContinuousCounterHashMap: HashMap<String, Int> = HashMap()
private var UnmarkedPatternCounterHashMap: HashMap<String, Int> = HashMap()
private var strTokensIpartFormHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
private var tokenFormsHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
private var strTokenEntryGetPOSHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
private var intTokenEntyCountsHashMap: HashMap<String, java.util.ArrayList<Int>> = HashMap()
private var ITokenTagsHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
private var strTokenStemsHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
private var AnotatorcounterHashMap: HashMap<String, Int> = HashMap()
private var TokensCounterHashMap: HashMap<String, Int> = HashMap()
private var entityTokenTagsHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
private var nerEntitiesHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
private var nerEntitiesTypeHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
private var stopWordTokenHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
private var stopWordLemmaHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
private var PairCounterHashMap: HashMap<String, Int> = HashMap()
constructor() {
stopwatch = Stopwatch.createUnstarted()
jmweAnnotationCache = HashMap<String, Annotation>()
pipelineAnnotationCache = HashMap<String, Annotation>()
pipelineSentimentAnnotationCache = HashMap<String, Annotation>()
coreDocumentAnnotationCache = HashMap<String, CoreDocument>()
gsf = initiateGrammaticalStructureFactory()
classifier = CRFClassifier.getClassifierNoExceptions(nerModel)
}
fun initiateGrammaticalStructureFactory(): GrammaticalStructureFactory {
val options = arrayOf("-maxLength", "100")
//val lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz"
val lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz"
val lp = LexicalizedParser.loadModel(lexParserEnglishPCFG, *options)
val tlp = lp.getOp().langpack()
return tlp.grammaticalStructureFactory()
}
public fun pipeLineSetUp(): StanfordCoreNLP {
val props = Properties()
val shiftReduceParserPath = "edu/stanford/nlp/models/srparser/englishSR.ser.gz"
//val nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.caseless.distsim.crf.ser.gz"
val nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.distsim.crf.ser.gz"
//val nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.caseless.distsim.crf.ser.gz"
val nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.distsim.crf.ser.gz"
props.setProperty("annotators", "tokenize,ssplit,pos,lemma,ner,parse")
props.setProperty("parse.model", shiftReduceParserPath)
props.setProperty("parse.maxlen", "90")
props.setProperty("parse.binaryTrees", "true")
props.setProperty("threads", "5")
props.setProperty("pos.maxlen", "90")
props.setProperty("tokenize.maxlen", "90")
props.setProperty("ssplit.maxlen", "90")
props.setProperty("lemma.maxlen", "90")
props.setProperty("ner.model", "$nerModel,$nerModel2,$nerModel3")
props.setProperty("ner.combinationMode", "HIGH_RECALL")
props.setProperty("regexner.ignorecase", "true")
props.setProperty("ner.fine.regexner.ignorecase", "true")
props.setProperty("tokenize.options", "untokenizable=firstKeep")
return StanfordCoreNLP(props)
}
fun shiftReduceParserInitiate(): StanfordCoreNLP {
val propsSentiment = Properties()
//val lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz"
val lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz"
val sentimentModel = "edu/stanford/nlp/models/sentiment/sentiment.ser.gz"
//val taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger"
val taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words-distsim.tagger"
val customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for,if,in,into,is,it,no,not,of," +
"on,or,such,that,the,their,then,there,these,they,this,to,was,will,with"
propsSentiment.setProperty("parse.model", lexParserEnglishPCFG)
propsSentiment.setProperty("sentiment.model", sentimentModel)
propsSentiment.setProperty("parse.maxlen", "90")
propsSentiment.setProperty("threads", "5")
propsSentiment.setProperty("pos.maxlen", "90")
propsSentiment.setProperty("tokenize.maxlen", "90")
propsSentiment.setProperty("ssplit.maxlen", "90")
propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,sentiment,lemma,stopword") //coref too expensive memorywise
propsSentiment.setProperty("customAnnotatorClass.stopword", "FunctionLayer.StopwordAnnotator")
propsSentiment.setProperty(StopwordAnnotator.STOPWORDS_LIST, customStopWordList)
propsSentiment.setProperty("tokenize.options", "untokenizable=firstKeep")
tagger = MaxentTagger(taggerPath)
println("finished shiftReduceParserInitiate\n")
return StanfordCoreNLP(propsSentiment)
}
fun updateStringCache() {
if (stopwatch.elapsed(TimeUnit.MINUTES) >= EXPIRE_TIME_IN_MINUTES || !stopwatch.isRunning) {
if (!stopwatch.isRunning) {
stopwatch.start()
} else {
stopwatch.reset()
}
stringCache.sortWith(Comparator.comparingInt(String::length).reversed());
System.out.println("pre InsertMYSQLStrings")
val arrayList = java.util.ArrayList<String>(stringCache)
DataMapper.InsertMYSQLStrings(arrayList)
DataMapper.checkStringsToDelete();
stringCache = ArrayList<String>();
initiateMYSQL();
}
}
fun initiateMYSQL() {
stringCache.addAll(DataMapper.getAllStrings())
}
private fun trimString(str: String): String {
var message = str.trim { it <= ' ' }
if (message.startsWith("<@")) {
message = message.substring(message.indexOf("> ") + 2)
}
if (!message.isEmpty()) {
message = message.replace("@", "")
if (message.contains("<>")) {
message = message.substring(message.indexOf(">"))
}
if (message.startsWith("[ *")) {
message = message.substring(message.indexOf("]"))
}
}
return message
}
private fun createStrAnnotation(str: String, stanfordCoreNLP: StanfordCoreNLP, sentimentBool: Boolean) {
val strAnno2 = Annotation(str)
strAnno2.compact()
stanfordCoreNLP.annotate(strAnno2)
if (sentimentBool) {
pipelineSentimentAnnotationCache.put(str, strAnno2)
} else {
pipelineAnnotationCache.put(str, strAnno2)
}
}
private fun getResponseFutures(strF: String, stanfordCoreNLP: StanfordCoreNLP, stanfordCoreNLPSentiment: StanfordCoreNLP): String {
val strAnno: Annotation = Annotation(strF)
strAnno.compact()
stanfordCoreNLP.annotate(strAnno)
val strAnnoSentiment: Annotation = Annotation(strF)
strAnnoSentiment.compact()
stanfordCoreNLPSentiment.annotate(strAnnoSentiment)
val coreDocument = CoreDocument(strF)
stanfordCoreNLP.annotate(coreDocument)
val values_copy: List<String> = ArrayList(stringCache)
var preRelationUserCounters = -155000.0
val concurrentRelations: MutableList<String> = arrayListOf()
val SB = StringBuilder()
var jmweAnnotationF = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strF)
var tokenizeCountingF: Int? = null
var taggedWordListF: List<List<TaggedWord>>? = null
var retrieveTGWListF: java.util.ArrayList<String>? = null
var sentencesF: List<CoreMap>? = null
var sentencesSentimentF: List<CoreMap>? = null
var coreMaps1: List<CoreMap> = jmweAnnotationF.get(CoreAnnotations.SentencesAnnotation::class.java)
var treesF: java.util.ArrayList<Tree>? = null
var grammaticalStructuresF: ArrayList<GrammaticalStructure>? = null
var typedDependenciesF: java.util.ArrayList<TypedDependency>? = null
var rnnCoreAnnotationsPredictedF: java.util.ArrayList<Int>? = null
var simpleMatricesF: java.util.ArrayList<SimpleMatrix>? = null
var simpleMatricesNodevectorsF: java.util.ArrayList<SimpleMatrix>? = null
var listF: MutableList<Any?>? = null
var longestF: Int? = null
var sentimentLongestF: Int? = null
var imwesF: List<IMWE<IToken>>? = null
var InflectedCounterNegativeF: Int? = null
var InflectedCounterPositiveF: Int? = null
var tokenEntryF: ArrayList<String>? = null
var MarkedContinuousCounterF: Int? = null
var UnmarkedPatternCounterF: Int? = null
var strTokensIpartFormF: ArrayList<String>? = null
var tokenFormsF: java.util.ArrayList<String>? = null
var strTokenEntryGetPOSF: ArrayList<String>? = null
var intTokenEntyCountsF: java.util.ArrayList<Int>? = null
var ITokenTagsF: ArrayList<String>? = null
var strTokenStemsF: java.util.ArrayList<String>? = null
var AnotatorcounterF: Int? = null
var TokensCounterF: Int? = null
var entityTokenTagsF: java.util.ArrayList<String>? = null
var nerEntitiesF: java.util.ArrayList<String>? = null
var nerEntitiesTypeF: java.util.ArrayList<String>? = null
var stopWordTokenF: java.util.ArrayList<String>? = null
var stopWordLemmaF: java.util.ArrayList<String>? = null
var PairCounterF: Int? = null
for (str1 in values_copy) {
if (strF != str1) {
val annotation2 = pipelineSentimentAnnotationCache.getOrDefault(str1, null)
val annotation4 = pipelineAnnotationCache.getOrDefault(str1, null)
val coreDocument1 = coreDocumentAnnotationCache.getOrDefault(str1, null)
var jmweAnnotation = jmweAnnotationCache.getOrDefault(str1, null)
if (annotation2 == null) {
createStrAnnotation(str1, stanfordCoreNLPSentiment, true)
}
if (annotation4 == null) {
createStrAnnotation(str1, stanfordCoreNLP, false)
}
if (coreDocument1 == null) {
getCoreDocumentsSuggested(stanfordCoreNLP, str1)
}
if (jmweAnnotation == null) {
getJMWEAnnotation(str1)
jmweAnnotation = jmweAnnotationCache.get(str1)
}
val tokenizeCounting: Int? = tokenizeCountingHashMap.getOrDefault(str1, null)
val taggedWordList1: List<List<TaggedWord>>? = taggedWordListHashMap.getOrDefault(str1, null)
val retrieveTGWList1: java.util.ArrayList<String>? = retrieveTGWListHashMap.getOrDefault(str1, null)
val sentence1: List<CoreMap>? = sentences1HashMap.getOrDefault(str1, null)
val sentenceSentiment1: List<CoreMap>? = sentencesSentimentHashMap.getOrDefault(str1, null)
val trees1 = trees1HashMap.getOrDefault(str1, null)
var coreMaps2: List<CoreMap> = listOf()
val grammaticalStructures1 = grammaticalStructureHashMap.getOrDefault(
str1, null)
if (jmweAnnotation != null) {
coreMaps2 = jmweAnnotation.get(CoreAnnotations.SentencesAnnotation::class.java)
}
val typedDependencies1 = typedDependenciesHashMap.getOrDefault(str1, null)
val rnnCoreAnnotationsPredicted1 = rnnCoreAnnotationsPredictedHashMap.getOrDefault(str1, null)
val simpleMatrices1 = simpleMatricesHashMap.getOrDefault(str1, null);
val simpleMatricesNodevectors1 = simpleMatricesNodevectorsHashMap.getOrDefault(str1, null);
val list1 = listHashMap.getOrDefault(str1, null);
val longest1 = longestHashMap.getOrDefault(str1, null);
val sentimentLongest1 = sentimentHashMap.getOrDefault(str1, null);
val imwes1 = imwesHashMap.getOrDefault(str1, null);
val InflectedCounterNegative1 = InflectedCounterNegativeHashMap.getOrDefault(str1, null);
val InflectedCounterPositive1 = InflectedCounterPositiveHashMap.getOrDefault(str1, null)
val tokenEntry1 = tokenEntryHashMap.getOrDefault(str1, null)
val MarkedContinuousCounter1 = MarkedContinuousCounterHashMap.getOrDefault(str1, null)
val UnmarkedPatternCounter1 = UnmarkedPatternCounterHashMap.getOrDefault(str1, null)
val strTokensIpartForm1 = strTokensIpartFormHashMap.getOrDefault(str1, null);
val tokenForms1 = tokenFormsHashMap.getOrDefault(str1, null);
val strTokenEntryGetPOS1 = strTokenEntryGetPOSHashMap.getOrDefault(str1, null)
val intTokenEntyCounts1 = intTokenEntyCountsHashMap.getOrDefault(str1, null);
val ITokenTags1 = ITokenTagsHashMap.getOrDefault(str1, null);
val strTokenStems1 = strTokenStemsHashMap.getOrDefault(str1, null);
val Anotatorcounter1 = AnotatorcounterHashMap.getOrDefault(str1, null);
val TokensCounter1 = TokensCounterHashMap.getOrDefault(str1, null);
val entityTokenTags1 = entityTokenTagsHashMap.getOrDefault(str1, null);
val nerEntities1 = nerEntitiesHashMap.getOrDefault(str1, null);
val nerEntitiesType1 = nerEntitiesTypeHashMap.getOrDefault(str1, null);
val stopWordToken1 = stopWordTokenHashMap.getOrDefault(str1, null);
val stopWordLemma1 = stopWordLemmaHashMap.getOrDefault(str1, null);
val PairCounter1 = PairCounterHashMap.getOrDefault(str1, null);
var SMX = SentimentAnalyzerTest(strF, str1, SimilarityMatrix(strF, str1),
coreMaps1, coreMaps2, strAnno,
pipelineAnnotationCache[str1], strAnnoSentiment,
pipelineSentimentAnnotationCache[str1], coreDocument, coreDocumentAnnotationCache[str1],
tagger, gsf, classifier, tokenizeCounting, tokenizeCountingF,
taggedWordListF, taggedWordList1, retrieveTGWListF, retrieveTGWList1,
sentencesF, sentence1, sentencesSentimentF, sentenceSentiment1, treesF, trees1,
grammaticalStructuresF, grammaticalStructures1, typedDependenciesF,
typedDependencies1, rnnCoreAnnotationsPredictedF, rnnCoreAnnotationsPredicted1,
simpleMatricesF, simpleMatrices1, simpleMatricesNodevectorsF, simpleMatricesNodevectors1,
listF, list1, longestF, longest1, sentimentLongestF, sentimentLongest1, imwesF,
imwes1, InflectedCounterNegativeF, InflectedCounterNegative1, InflectedCounterPositiveF,
InflectedCounterPositive1, tokenEntryF, tokenEntry1, MarkedContinuousCounterF,
MarkedContinuousCounter1, UnmarkedPatternCounterF, UnmarkedPatternCounter1,
strTokensIpartFormF, strTokensIpartForm1, tokenFormsF, tokenForms1,
strTokenEntryGetPOSF, strTokenEntryGetPOS1, intTokenEntyCountsF,
intTokenEntyCounts1, ITokenTagsF, ITokenTags1, strTokenStemsF, strTokenStems1,
AnotatorcounterF, Anotatorcounter1, TokensCounterF, TokensCounter1,
entityTokenTagsF, entityTokenTags1, nerEntitiesF, nerEntities1, nerEntitiesTypeF,
nerEntitiesType1, stopWordTokenF, stopWordToken1, stopWordLemmaF, stopWordLemma1,
PairCounterF, PairCounter1)
if (tokenizeCounting == null) {
tokenizeCountingHashMap.put(str1, SMX.getTokenizeCounting())
}
if (taggedWordList1 == null) {
taggedWordListHashMap.put(str1, SMX.getTaggedWordList1())
}
if (tokenizeCountingF == null) {
tokenizeCountingF = SMX.getTokenizeCountingF();
}
if (taggedWordListF == null) {
taggedWordListF = SMX.getTaggedWordListF();
}
if (retrieveTGWListF == null) {
retrieveTGWListF = SMX.getRetrieveTGWListF();
}
if (retrieveTGWList1 == null) {
retrieveTGWListHashMap.put(str1, SMX.getRetrieveTGWList1());
}
if (sentencesF == null) {
sentencesF = SMX.getSentencesF();
}
if (sentence1 == null) {
sentences1HashMap.put(str1, SMX.getSentences1())
}
if (sentencesSentimentF == null) {
sentencesSentimentF = SMX.getSentencesSentimentF();
}
if (sentenceSentiment1 == null) {
sentencesSentimentHashMap.put(str1, SMX.getSentencesSentiment1());
}
if (treesF == null) {
treesF = SMX.getTreesF();
}
if (trees1 == null) {
trees1HashMap.put(str1, SMX.getTrees1())
}
if (grammaticalStructuresF == null) {
grammaticalStructuresF = SMX.getGrammaticalStructuresF();
}
if (grammaticalStructures1 == null) {
grammaticalStructureHashMap.put(str1, SMX.getGrammaticalStructures1())
}
if (typedDependenciesF == null) {
typedDependenciesF = SMX.getTypedDependenciesF();
}
if (typedDependencies1 == null) {
typedDependenciesHashMap.put(str1, SMX.getTypedDependencies1())
}
if (rnnCoreAnnotationsPredictedF == null) {
rnnCoreAnnotationsPredictedF = SMX.getRnnCoreAnnotationsPredictedF()
}
if (rnnCoreAnnotationsPredicted1 == null) {
rnnCoreAnnotationsPredictedHashMap.put(str1, SMX.getRnnCoreAnnotationsPredicted1())
}
if (simpleMatricesF == null) {
simpleMatricesF = SMX.getSimpleMatricesF();
}
if (simpleMatrices1 == null) {
simpleMatricesHashMap.put(str1, SMX.getSimpleMatrices1());
}
if (simpleMatricesNodevectorsF == null) {
simpleMatricesNodevectorsF = SMX.getSimpleMatricesNodevectorsF();
}
if (simpleMatricesNodevectors1 == null) {
simpleMatricesNodevectorsHashMap.put(str1, SMX.getSimpleMatricesNodevectors1());
}
if (listF == null) {
listF = SMX.getListF();
}
if (list1 == null) {
listHashMap.put(str1, SMX.getList1());
}
if (longestF == null) {
longestF = SMX.getLongestF();
}
if (longest1 == null) {
longestHashMap.put(str1, SMX.getLongest1());
}
if (sentimentLongestF == null) {
sentimentLongestF = SMX.getSentimentLongestF();
}
if (sentimentLongest1 == null) {
sentimentHashMap.put(str1, SMX.getSentimentLongest1());
}
if (imwesF == null) {
imwesF = SMX.getImwesF();
}
if (imwes1 == null) {
imwesHashMap.put(str1, SMX.getImwes1());
}
if (InflectedCounterNegativeF == null) {
InflectedCounterNegativeF = SMX.getInflectedCounterNegativeF();
}
if (InflectedCounterNegative1 == null) {
InflectedCounterNegativeHashMap.put(str1, SMX.getInflectedCounterNegative1());
}
if (InflectedCounterPositiveF == null) {
InflectedCounterPositiveF = SMX.getInflectedCounterPositiveF();
}
if (InflectedCounterPositive1 == null) {
InflectedCounterPositiveHashMap.put(str1, SMX.getInflectedCounterPositive1());
}
if (tokenEntryF == null) {
tokenEntryF = SMX.getTokenEntryF();
}
if (tokenEntry1 == null) {
tokenEntryHashMap.put(str1, SMX.getTokenEntry1())
}
if (MarkedContinuousCounterF == null) {
MarkedContinuousCounterF = SMX.getMarkedContinuousCounterF();
}
if (MarkedContinuousCounter1 == null) {
MarkedContinuousCounterHashMap.put(str1, SMX.getMarkedContinuousCounter1());
}
if (UnmarkedPatternCounterF == null) {
UnmarkedPatternCounterF = SMX.getUnmarkedPatternCounterF();
}
if (UnmarkedPatternCounter1 == null) {
UnmarkedPatternCounterHashMap.put(str1, SMX.getUnmarkedPatternCounter1());
}
if (strTokensIpartFormF == null) {
strTokensIpartFormF = SMX.getStrTokensIpartFormF();
}
if (strTokensIpartForm1 == null) {
strTokensIpartFormHashMap.put(str1, SMX.getStrTokensIpartForm1());
}
if (tokenFormsF == null) {
tokenFormsF = SMX.getTokenFormsF();
}
if (tokenForms1 == null) {
tokenFormsHashMap.put(str1, SMX.getTokenForms1());
}
if (strTokenEntryGetPOSF == null) {
strTokenEntryGetPOSF = SMX.getStrTokenEntryGetPOSF();
}
if (strTokenEntryGetPOS1 == null) {
strTokenEntryGetPOSHashMap.put(str1, SMX.getStrTokenEntryGetPOS1())
}
if (intTokenEntyCountsF == null) {
intTokenEntyCountsF = SMX.getIntTokenEntyCountsF();
}
if (intTokenEntyCounts1 == null) {
intTokenEntyCountsHashMap.put(str1, SMX.getIntTokenEntyCounts1());
}
if (ITokenTagsF == null) {
ITokenTagsF = SMX.getITokenTagsF();
}
if (ITokenTags1 == null) {
ITokenTagsHashMap.put(str1, SMX.getITokenTags1());
}
if (strTokenStemsF == null) {
strTokenStemsF = SMX.getStrTokenStemsF();
}
if (strTokenStems1 == null) {
strTokenStemsHashMap.put(str1, SMX.getStrTokenStems1());
}
if (AnotatorcounterF == null) {
AnotatorcounterF = SMX.getAnotatorcounterF();
}
if (Anotatorcounter1 == null) {
AnotatorcounterHashMap.put(str1, SMX.getAnotatorcounter1());
}
if (TokensCounterF == null) {
TokensCounterF = SMX.getTokensCounterF();
}
if (TokensCounter1 == null) {
TokensCounterHashMap.put(str1, SMX.getTokensCounter1());
}
if (entityTokenTagsF == null) {
entityTokenTagsF = SMX.getEntityTokenTagsF();
}
if (entityTokenTags1 == null) {
entityTokenTagsHashMap.put(str1, SMX.getEntityTokenTags1());
}
if (nerEntitiesF == null) {
nerEntitiesF = SMX.getNerEntitiesF();
}
if (nerEntities1 == null) {
nerEntitiesHashMap.put(str1, SMX.getNerEntities1());
}
if (nerEntitiesTypeF == null) {
nerEntitiesTypeF = SMX.getNerEntitiesTypeF();
}
if (nerEntitiesType1 == null) {
nerEntitiesTypeHashMap.put(str1, SMX.getNerEntitiesType1());
}
if (stopWordTokenF == null) {
stopWordTokenF = SMX.getStopWordTokenF();
}
if (stopWordToken1 == null) {
stopWordTokenHashMap.put(str1, SMX.getStopWordToken1());
}
if (stopWordLemmaF == null) {
stopWordLemmaF = SMX.getStopWordLemmaF();
}
if (stopWordLemma1 == null) {
stopWordLemmaHashMap.put(str1, SMX.getStopWordLemma1());
}
if (PairCounterF == null) {
PairCounterF = SMX.getPairCounterF();
}
if (PairCounter1 == null) {
PairCounterHashMap.put(str1, SMX.getPairCounter1());
}
val getSMX: SimilarityMatrix = SMX.callSMX()
val scoreRelationLastUserMsg = getSMX.distance
if (scoreRelationLastUserMsg > preRelationUserCounters) {
preRelationUserCounters = scoreRelationLastUserMsg
concurrentRelations.add(getSMX.secondaryString)
}
}
}
val cacheRequirement = 6500
if (preRelationUserCounters > cacheRequirement && !stringCache.contains(strF) && filterContent(strF)) {
stringCache.add(strF)
}
val randomLengthPermit = strF.length * (Math.random() * Math.random() * Math.random() * (Math.random() * 10))
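// with four independent Math.random() factors the permit averages 10/16 = 0.625 * strF.length,
// biasing assembled replies to be somewhat shorter than the triggering message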
Collections.reverse(concurrentRelations)
val mysqlUpdateLastUsed: ArrayList<String> = ArrayList()
if (!concurrentRelations.isEmpty()) {
for (secondaryRelation in concurrentRelations) {
if (SB.toString().length > randomLengthPermit && !SB.toString().isEmpty()) {
break
}
SB.append(secondaryRelation).append(" ")
mysqlUpdateLastUsed.add(secondaryRelation)
}
}
if (SB.toString().isEmpty()) {
return "failure, preventing stuckness"
}
runBlocking {
launch(Dispatchers.IO) {
DataMapper.updateLastUsed(mysqlUpdateLastUsed)
yield()
}
}
return SB.toString()
}
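// caches the per-string JMWE annotation so repeated inputs skip a fresh annotation pass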
private fun getJMWEAnnotation(str1: String) {
val jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(str1)
jmweAnnotationCache.put(str1, jmweAnnotation)
}
fun getResponseMsg(str: String, personName: String, stanfordCoreNLP: StanfordCoreNLP,
stanfordCoreNLPSentiment: StanfordCoreNLP, ingameResponse: Boolean): String {
var responseFutures: String = ""
runBlocking {
val launch1 = launch(Dispatchers.Default) {
val strF = trimString(str)
responseFutures = getResponseFutures(strF, stanfordCoreNLP, stanfordCoreNLPSentiment)
if (!ingameResponse) {
responseFutures = checkPersonPresentInSentence(personName, responseFutures, strF, stanfordCoreNLP,
stanfordCoreNLPSentiment)
}
yield()
}
launch1.join()
}
return responseFutures
}
private fun checkPersonPresentInSentence(personName: String, responseMsg: String, userLastMessage: String,
stanfordCoreNLP: StanfordCoreNLP,
stanfordCoreNLPSentiment: StanfordCoreNLP): String {
try {
val pipelineCoreDocument = CoreDocument(responseMsg)
val pipelineCoreDocumentLastMsg = CoreDocument(userLastMessage)
stanfordCoreNLP.annotate(pipelineCoreDocument)
stanfordCoreNLPSentiment.annotate(pipelineCoreDocumentLastMsg)
val regex = "(.*?\\d){10,}"
for (em in pipelineCoreDocument.entityMentions()) {
val entityType = em.entityType()
if (entityType == "PERSON") {
var str = responseMsg
val emText = em.text()
val pattern = Pattern.compile(regex)
val matcher = pattern.matcher(personName)
val isMatched = matcher.matches()
if (emText != personName && !isMatched) {
for (emLastMsg in pipelineCoreDocumentLastMsg.entityMentions()) {
if (emText != emLastMsg.text() && !Character.isDigit(emLastMsg.text().trim { it <= ' ' }[0])) {
//System.out.println("emLastMsg.text(): " + emLastMsg.text());
str = (responseMsg.substring(0, responseMsg.indexOf(emText)) + " "
+ emLastMsg + " " + responseMsg.substring(responseMsg.indexOf(emText)))
}
}
str += " $personName"
return str
}
}
}
} catch (e: Exception) {
println("""SCUFFED JAYZ: ${e.localizedMessage}""".trimIndent())
}
return responseMsg
}
fun filterContent(str: String): Boolean {
if (!str.isEmpty() && str.length > 3) {
val str1Local: String = str.trim()
if (str1Local.length > 2 && !str1Local.startsWith("!")) {
return true
}
}
return false
}
fun getCoreDocumentsSuggested(pipeline: StanfordCoreNLP, str: String) {
val annotation = Annotation(str)
pipeline.annotate(annotation)
val coreDocument = CoreDocument(annotation)
coreDocumentAnnotationCache.put(str, coreDocument)
}
}

View File

@ -9,43 +9,45 @@ import PresentationLayer.DiscordHandler;
import discord4j.core.event.domain.message.MessageCreateEvent;
import discord4j.core.object.entity.User;
import discord4j.core.object.entity.channel.TextChannel;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
/**
*
* @author install1
*/
public class DoStuff {
public static boolean occupied = false;
public static void doStuff(MessageCreateEvent event, String usernameBot, Datahandler datahandler,
StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) {
String username = "";
public static boolean isOccupied() {
return occupied;
}
public static void doStuff(MessageCreateEvent event, String usernameBot) {
String username = null;
try {
username = event.getMessage().getAuthor().get().getUsername();
} catch (java.util.NoSuchElementException e) {
username = null;
}
if (username != null && !username.equals(usernameBot)) {
occupied = true;
TextChannel block = event.getMessage().getChannel().cast(TextChannel.class).block();
String name = block.getCategory().block().getName();
name = name.toLowerCase();
String channelName = block.getName().toLowerCase();
boolean channelpermissionsDenied = false;
if (channelName.contains("suggestion-box")) {
channelpermissionsDenied = true;
}
switch (name) {
case "public area":
case "public area": {
break;
}
case "information area": {
break;
}
@ -54,34 +56,49 @@ public class DoStuff {
break;
}
}
List<User> blockLast = event.getMessage().getUserMentions().buffer().blockLast();
String content = event.getMessage().getContent();
if (!channelpermissionsDenied) {
List<User> blockLast = event.getMessage().getUserMentions().buffer().blockLast();
String content = event.getMessage().getContent();
if (blockLast != null) {
if (blockLast != null)
{
for (User user : blockLast) {
content = content.replace(user.getId().asString(), "");
}
}
boolean mentionedBot = false;
if (blockLast != null) {
for (User user : blockLast) {
if (user.getUsername().equals(usernameBot)) {
mentionedBot = true;
break;
}
MessageResponseHandler.getMessage(content);
}
boolean mentionedBot = false;
if (blockLast != null){
for (User user : blockLast)
{
if (user.getUsername().equals(usernameBot))
{
mentionedBot = true;
break;
}
}
if (mentionedBot || channelName.contains("general-autism")) {
}
if (mentionedBot || channelName.contains("general-autism")) {
try {
String ResponseStr;
ResponseStr = datahandler.getResponseMsg(content, username, stanfordCoreNLP, stanfordCoreNLPSentiment,
false);
ResponseStr = MessageResponseHandler.selectReponseMessage(content, username);
if (!ResponseStr.isEmpty()) {
System.out.print("\nResponseStr3: " + ResponseStr + "\n");
event.getMessage().getChannel().block().createMessage(ResponseStr).block();
}
} catch (CustomError ex) {
Logger.getLogger(DoStuff.class.getName()).log(Level.SEVERE, null, ex);
}
}
datahandler.updateStringCache();
new Thread(() -> {
try {
Datahandler.instance.checkIfUpdateStrings();
} catch (CustomError ex) {
Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);
}
}).start();
occupied = false;
}
}
}

View File

@ -0,0 +1,101 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package FunctionLayer;
import com.google.common.collect.MapMaker;
import edu.stanford.nlp.pipeline.CoreDocument;
import edu.stanford.nlp.pipeline.CoreEntityMention;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
*
* @author install1
*/
public class MessageResponseHandler {
private static ConcurrentMap<Integer, String> str = new MapMaker().concurrencyLevel(2).makeMap();
public static ConcurrentMap<Integer, String> getStr() {
return str;
}
public static void setStr(ConcurrentMap<Integer, String> str) {
MessageResponseHandler.str = str;
}
public static void getMessage(String message) {
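// e.g. "[ *DEAD* ] rude @guy: hi" is stored as "] rude guy: hi" - the "@" is dropped and
// everything before the first "]" is cut; entries are keyed by the growing map size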
if (message != null && !message.isEmpty()) {
message = message.replace("@", "");
if (message.contains("<>")) {
message = message.substring(message.indexOf(">"));
}
if (message.startsWith("[ *")) {
message = message.substring(message.indexOf("]"));
}
str.put(str.size() + 1, message);
}
}
public static String selectReponseMessage(String toString, String personName) throws CustomError {
ConcurrentMap<Integer, String> str1 = new MapMaker().concurrencyLevel(6).makeMap();
str1.put(str1.size() + 1, toString);
String strreturn = "";
for (String str : str1.values()) {
if (!str.isEmpty()) {
strreturn = str;
}
}
String getResponseMsg = Datahandler.instance.getResponseMsg(strreturn);
getResponseMsg = checkPersonPresentInSentence(personName, getResponseMsg, strreturn);
return getResponseMsg;
}
private static String checkPersonPresentInSentence(String personName, String responseMsg, String userLastMessage) {
//check if userLastMessage contains the person as a reference
//check whether the first person is the author or the person they mention
try {
String strreturn = responseMsg;
CoreDocument pipelineCoreDocument = new CoreDocument(responseMsg);
CoreDocument pipelineCoreDocumentLastMsg = new CoreDocument(userLastMessage);
Datahandler.getPipeline().annotate(pipelineCoreDocument);
Datahandler.getPipeline().annotate(pipelineCoreDocumentLastMsg);
String regex = "(.*?\\d){10,}";
for (CoreEntityMention em : pipelineCoreDocument.entityMentions()) {
String entityType = em.entityType();
if (entityType.equals("PERSON")) {
String str = strreturn;
String emText = em.text();
Pattern pattern = Pattern.compile(regex);
Matcher matcher = pattern.matcher(personName);
boolean isMatched = matcher.matches();
if (!emText.equals(personName) && !isMatched) {
for (CoreEntityMention emLastMsg : pipelineCoreDocumentLastMsg.entityMentions()) {
if (!emText.equals(emLastMsg.text()) && !Character.isDigit(emLastMsg.text().trim().charAt(0))) {
//System.out.println("emLastMsg.text(): " + emLastMsg.text());
str = strreturn.substring(0, strreturn.indexOf(emText)) + " "
+ emLastMsg + " " + strreturn.substring(strreturn.indexOf(emText));
}
}
str += " " + personName;
return str;
}
}
}
} catch (Exception e) {
System.out.println("SCUFFED JAYZ: " + e.getLocalizedMessage() + "\n");
}
return responseMsg;
}
public static int getOverHead() {
int getResponseMsgOverHead = Datahandler.instance.getMessageOverHead();
return getResponseMsgOverHead;
}
}
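A minimal caller sketch, not part of this commit: it shows how a consumer like DoStuff above feeds one chat line through the handler. HandlerSketch, the sample string, and the user name are invented for illustration, and it assumes Datahandler and the JMWE singleton were initialized first, as DiscordHandler.main does below.
import FunctionLayer.CustomError;
import FunctionLayer.MessageResponseHandler;
public class HandlerSketch {
public static void main(String[] args) {
String content = "hello bot, how are you"; // invented sample line
MessageResponseHandler.getMessage(content); // queue the cleaned-up line for scoring
try {
String reply = MessageResponseHandler.selectReponseMessage(content, "someUser");
if (!reply.isEmpty()) {
System.out.println(reply);
}
} catch (CustomError ex) {
ex.printStackTrace();
}
}
}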

View File

@ -5,6 +5,7 @@
*/
package FunctionLayer;
import com.google.common.collect.MapMaker;
import edu.mit.jmwe.data.IMWE;
import edu.mit.jmwe.data.IToken;
import edu.mit.jmwe.data.Token;
@ -23,29 +24,37 @@ import edu.stanford.nlp.ling.JMWEAnnotation;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.util.CoreMap;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ConcurrentMap;
/**
*
* @author install1
*/
//maybe not public?
public class PipelineJMWESingleton {
//if not needed to be volatile dont make it, increases time
//public volatile static PipelineJMWESingleton INSTANCE;
public static PipelineJMWESingleton INSTANCE;
public volatile static PipelineJMWESingleton INSTANCE;
private static StanfordCoreNLP localNLP = initializeJMWE();
private static String underscoreSpaceReplacement;
private static IMWEIndex index;
private static IMWEDetector detector;
private PipelineJMWESingleton() {
String jmweIndexData = "/home/gameservers/autism_bot/lib/mweindex_wordnet3.0_semcor1.6.data"; // ./lib/mweindex_wordnet3.0_semcor1.6.data
}
public static void getINSTANCE() {
INSTANCE = new PipelineJMWESingleton();
}
public final ConcurrentMap<String, Annotation> getJMWEAnnotation(Collection<String> strvalues) {
boolean verbose = false;
IMWEIndex index;
String jmweIndexData = "/home/debian/autism_bot/lib/mweindex_wordnet3.0_semcor1.6.data"; // ./lib/mweindex_wordnet3.0_semcor1.6.data
String jmweIndexDataLocalTest = "E:/java8/Projects/mweindex_wordnet3.0_semcor1.6.data";
File indexFile = new File((String) jmweIndexData);
index = new MWEIndex(indexFile);
@ -55,45 +64,36 @@ public class PipelineJMWESingleton {
} catch (IOException e) {
throw new RuntimeException("unable to open IMWEIndex index: " + e + "\n");
}
detector = getDetector(index, detectorName);
IMWEDetector detector = getDetector(index, detectorName);
ConcurrentMap<String, Annotation> returnAnnotations = new MapMaker().concurrencyLevel(2).makeMap();
strvalues.forEach(str -> {
Annotation annoStr = new Annotation(str);
returnAnnotations.put(str, annoStr);
});
localNLP.annotate(returnAnnotations.values());
returnAnnotations.values().parallelStream().forEach(annoStr -> {
for (CoreMap sentence : annoStr.get(CoreAnnotations.SentencesAnnotation.class)) {
List<IMWE<IToken>> mwes = getjMWEInSentence(sentence, index, detector, verbose);
sentence.set(JMWEAnnotation.class, mwes);
}
});
index.close();
}
public static void getINSTANCE() {
INSTANCE = new PipelineJMWESingleton();
}
public final Annotation getJMWEAnnotation(String str) {
try {
index.open();
} catch (IOException e) {
throw new RuntimeException("unable to open IMWEIndex index: " + e + "\n");
}
Annotation annoStr = new Annotation(str);
localNLP.annotate(annoStr);
Class<CoreAnnotations.SentencesAnnotation> sentencesAnnotationClass = CoreAnnotations.SentencesAnnotation.class;
for (CoreMap sentence : annoStr.get(sentencesAnnotationClass)) {
List<IMWE<IToken>> mwes = getjMWEInSentence(sentence, index, detector, false);
//annoStr.set(JMWEAnnotation.class, mwes);
sentence.set(JMWEAnnotation.class, mwes);
}
index.close();
return annoStr;
return returnAnnotations;
}
public final static StanfordCoreNLP initializeJMWE() {
Properties propsJMWE = new Properties();
propsJMWE.setProperty("annotators", "tokenize,ssplit,pos,lemma");
propsJMWE.setProperty("tokenize.options", "untokenizable=firstKeep");
propsJMWE.setProperty("threads", "5");
propsJMWE.setProperty("tokenize.options", "untokenizable=firstDelete");
propsJMWE.setProperty("threads", "25");
propsJMWE.setProperty("pos.maxlen", "90");
propsJMWE.setProperty("tokenize.maxlen", "90");
propsJMWE.setProperty("ssplit.maxlen", "90");
propsJMWE.setProperty("lemma.maxlen", "90");
underscoreSpaceReplacement = "-";
localNLP = new StanfordCoreNLP(propsJMWE);
System.out.println("finished JMWE constructor \n");
System.out.println("finished singleton constructor \n");
return localNLP;
}
@ -124,7 +124,7 @@ public class PipelineJMWESingleton {
}
public List<IMWE<IToken>> getjMWEInSentence(CoreMap sentence, IMWEIndex index, IMWEDetector detector,
boolean verbose) {
boolean verbose) {
List<IToken> tokens = getITokens(sentence.get(CoreAnnotations.TokensAnnotation.class));
List<IMWE<IToken>> mwes = detector.detect(tokens);
if (verbose) {
@ -146,4 +146,5 @@ public class PipelineJMWESingleton {
}
return sentence;
}
}
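A minimal call-order sketch, also illustration only: JmweSketch is a hypothetical harness reflecting what DiscordHandler.main below implies, namely that getINSTANCE() must run once before INSTANCE is dereferenced, and that the hard-coded mweindex data file must exist on disk.
import FunctionLayer.PipelineJMWESingleton;
import edu.stanford.nlp.pipeline.Annotation;
public class JmweSketch {
public static void main(String[] args) {
PipelineJMWESingleton.getINSTANCE(); // one-time setup of the tokenize/ssplit/pos/lemma pipeline
Annotation anno = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation("he kicked the bucket");
// every sentence in anno now carries a JMWEAnnotation listing detected multi-word expressions
System.out.println(anno);
}
}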

View File

@ -5,7 +5,10 @@
*/
package FunctionLayer;
import FunctionLayer.StanfordParser.SentimentValueCache;
/**
*
* @author install1
*/
public class SimilarityMatrix {
@ -13,6 +16,8 @@ public class SimilarityMatrix {
private String PrimaryString;
private String SecondaryString;
private double distance;
private SentimentValueCache cacheValue1;
private SentimentValueCache cacheValue2;
public final double getDistance() {
return distance;
@ -33,8 +38,36 @@ public class SimilarityMatrix {
this.distance = result;
}
public final String getPrimaryString() {
return PrimaryString;
}
public final void setPrimaryString(String PrimaryString) {
this.PrimaryString = PrimaryString;
}
public final String getSecondaryString() {
return SecondaryString;
}
public final void setSecondaryString(String SecondaryString) {
this.SecondaryString = SecondaryString;
}
public final SentimentValueCache getCacheValue1() {
return cacheValue1;
}
public final void setCacheValue1(SentimentValueCache cacheValue1) {
this.cacheValue1 = cacheValue1;
}
public final SentimentValueCache getCacheValue2() {
return cacheValue2;
}
public final void setCacheValue2(SentimentValueCache cacheValue2) {
this.cacheValue2 = cacheValue2;
}
}

View File

@ -0,0 +1,334 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package FunctionLayer.StanfordParser;
import com.google.common.collect.MapMaker;
import edu.stanford.nlp.ling.TaggedWord;
import edu.stanford.nlp.trees.GrammaticalStructure;
import edu.stanford.nlp.trees.Tree;
import edu.stanford.nlp.trees.TypedDependency;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import org.ejml.simple.SimpleMatrix;
/**
*
* @author install1
*/
public class SentimentValueCache {
private String sentence;
private int counter;
private List<List<TaggedWord>> taggedwordlist = new ArrayList<>();
private final ConcurrentMap<Integer, String> tgwlistIndex = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, Tree> sentenceConstituencyParseList = new MapMaker().concurrencyLevel(2).makeMap();
private final Collection<TypedDependency> allTypedDependencies = new ArrayList<>();
private final ConcurrentMap<Integer, GrammaticalStructure> gsMap = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist = new MapMaker().concurrencyLevel(3).makeMap();
private final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector = new MapMaker().concurrencyLevel(3).makeMap();
private final ConcurrentMap<Integer, Integer> rnnPredictClassMap = new MapMaker().concurrencyLevel(3).makeMap();
private List classifyRaw;
private int mainSentiment = 0;
private int longest = 0;
private int tokensCounter = 0;
private int anotatorcounter = 0;
private int inflectedCounterPositive = 0;
private int inflectedCounterNegative = 0;
private int MarkedContinuousCounter = 0;
private int MarkedContiniousCounterEntries = 0;
private int UnmarkedPatternCounter = 0;
private int pairCounter = 0;
private final ConcurrentMap<Integer, String> ITokenMapTag = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> strTokenStems = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> strTokenForm = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> strTokenGetEntry = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> strTokenGetiPart = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> strTokenEntryPOS = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, Integer> entryCounts = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> nerEntities1 = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> nerEntities2 = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> nerEntityTokenTags = new MapMaker().concurrencyLevel(3).makeMap();
private final ConcurrentMap<Integer, String> stopwordTokens = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> stopWordLemma = new MapMaker().concurrencyLevel(2).makeMap();
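// note: keys are derived from map.size() at insertion time; that read-then-put is not atomic,
// so these ConcurrentMaps only stay densely indexed while a single thread fills a given cache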
public int getPairCounter() {
return pairCounter;
}
public void setPairCounter(int pairCounter) {
this.pairCounter = pairCounter;
}
public void addStopWordLemma(String str) {
stopWordLemma.put(stopWordLemma.size(), str);
}
public void addstopwordTokens(String str) {
stopwordTokens.put(stopwordTokens.size(), str);
}
public ConcurrentMap<Integer, String> getStopwordTokens() {
return stopwordTokens;
}
public ConcurrentMap<Integer, String> getStopWordLemma() {
return stopWordLemma;
}
public void addnerEntityTokenTags(String str) {
nerEntityTokenTags.put(nerEntityTokenTags.size(), str);
}
public ConcurrentMap<Integer, String> getnerEntityTokenTags() {
return nerEntityTokenTags;
}
public ConcurrentMap<Integer, String> getnerEntities1() {
return nerEntities1;
}
public ConcurrentMap<Integer, String> getnerEntities2() {
return nerEntities2;
}
public void addNEREntities1(String str) {
nerEntities1.put(nerEntities1.size(), str);
}
public void addNEREntities2(String str) {
nerEntities2.put(nerEntities2.size(), str);
}
public void setTaggedwords(List<List<TaggedWord>> twlist) {
taggedwordlist = twlist;
}
public List<List<TaggedWord>> getTaggedwordlist() {
return taggedwordlist;
}
public void addEntryCounts(int counts) {
entryCounts.put(entryCounts.size(), counts);
}
public ConcurrentMap<Integer, Integer> getEntryCounts() {
return entryCounts;
}
public void addstrTokenEntryPOS(String str) {
strTokenEntryPOS.put(strTokenEntryPOS.size(), str);
}
public ConcurrentMap<Integer, String> getstrTokenEntryPOS() {
return strTokenEntryPOS;
}
public void addstrTokenGetiPart(String str) {
strTokenGetiPart.put(strTokenGetiPart.size(), str);
}
public ConcurrentMap<Integer, String> getstrTokenGetiPart() {
return strTokenGetiPart;
}
public ConcurrentMap<Integer, String> getstrTokenGetEntry() {
return strTokenGetEntry;
}
public void addstrTokenGetEntry(String str) {
strTokenGetEntry.put(strTokenGetEntry.size(), str);
}
public ConcurrentMap<Integer, String> getstrTokenForm() {
return strTokenForm;
}
public void addstrTokenForm(String str) {
strTokenForm.put(strTokenForm.size(), str);
}
public ConcurrentMap<Integer, String> getstrTokenStems() {
return strTokenStems;
}
public void addstrTokenStems(String str) {
strTokenStems.put(strTokenStems.size(), str);
}
public ConcurrentMap<Integer, String> getITokenMapTag() {
return ITokenMapTag;
}
public void addITokenMapTag(String str) {
ITokenMapTag.put(ITokenMapTag.size(), str);
}
public int getUnmarkedPatternCounter() {
return UnmarkedPatternCounter;
}
public void setUnmarkedPatternCounter(int UnmarkedPatternCounter) {
this.UnmarkedPatternCounter = UnmarkedPatternCounter;
}
public int getMarkedContiniousCounterEntries() {
return MarkedContiniousCounterEntries;
}
public void setMarkedContiniousCounterEntries(int MarkedContiniousCounterEntries) {
this.MarkedContiniousCounterEntries = MarkedContiniousCounterEntries;
}
public int getMarkedContinuousCounter() {
return MarkedContinuousCounter;
}
public void setMarkedContinuousCounter(int MarkedContinuousCounter) {
this.MarkedContinuousCounter = MarkedContinuousCounter;
}
public int getInflectedCounterNegative() {
return inflectedCounterNegative;
}
public void setInflectedCounterNegative(int inflectedCounterNegative) {
this.inflectedCounterNegative = inflectedCounterNegative;
}
public int getInflectedCounterPositive() {
return inflectedCounterPositive;
}
public void setInflectedCounterPositive(int inflectedCounterPositive) {
this.inflectedCounterPositive = inflectedCounterPositive;
}
public int getAnotatorcounter() {
return anotatorcounter;
}
public void setAnotatorcounter(int anotatorcounter) {
this.anotatorcounter = anotatorcounter;
}
public int getTokensCounter() {
return tokensCounter;
}
public void setTokensCounter(int tokensCounter) {
this.tokensCounter = tokensCounter;
}
public int getMainSentiment() {
return mainSentiment;
}
public void setMainSentiment(int mainSentiment) {
this.mainSentiment = mainSentiment;
}
public int getLongest() {
return longest;
}
public void setLongest(int longest) {
this.longest = longest;
}
public List getClassifyRaw() {
return classifyRaw;
}
public void setClassifyRaw(List classifyRaw) {
this.classifyRaw = classifyRaw;
}
public ConcurrentMap<Integer, Integer> getRnnPrediectClassMap() {
return rnnPredictClassMap;
}
public void addRNNPredictClass(int rnnPrediction) {
rnnPredictClassMap.put(rnnPredictClassMap.size(), rnnPrediction);
}
public void addSimpleMatrix(SimpleMatrix SMX) {
simpleSMXlist.put(simpleSMXlist.size(), SMX);
}
public void addSimpleMatrixVector(SimpleMatrix SMX) {
simpleSMXlistVector.put(simpleSMXlistVector.size(), SMX);
}
public ConcurrentMap<Integer, GrammaticalStructure> getGsMap() {
return gsMap;
}
public ConcurrentMap<Integer, SimpleMatrix> getSimpleSMXlist() {
return simpleSMXlist;
}
public ConcurrentMap<Integer, SimpleMatrix> getSimpleSMXlistVector() {
return simpleSMXlistVector;
}
public ConcurrentMap<Integer, GrammaticalStructure> getGs() {
return gsMap;
}
public int getCounter() {
return counter;
}
public void addGS(GrammaticalStructure gs) {
gsMap.put(gsMap.size(), gs);
}
public Collection<TypedDependency> getAllTypedDependencies() {
return allTypedDependencies;
}
public void addTypedDependencies(Collection<TypedDependency> TDPlist) {
allTypedDependencies.addAll(TDPlist);
}
public ConcurrentMap<Integer, Tree> getSentenceConstituencyParseList() {
return sentenceConstituencyParseList;
}
public void addSentenceConstituencyParse(Tree tree) {
sentenceConstituencyParseList.put(sentenceConstituencyParseList.size(), tree);
}
public void setCounter(int counter) {
this.counter = counter;
}
public String getSentence() {
return sentence;
}
public SentimentValueCache(String str, int counter) {
this.sentence = str;
this.counter = counter;
}
public ConcurrentMap<Integer, String> getTgwlistIndex() {
return tgwlistIndex;
}
public void addTgwlistIndex(String str) {
tgwlistIndex.put(tgwlistIndex.size(), str);
}
public SentimentValueCache(String str) {
this.sentence = str;
}
}

View File

@ -1,3 +0,0 @@
Manifest-Version: 1.0
Main-Class: PresentationLayer.DiscordHandler

View File

@ -1,111 +1,71 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
ps ax | grep EventNotfierDiscordBot-1.0
kill $pid (number)
nohup screen -d -m -S nonroot java -Xmx6048M -jar /home/javatests/ArtificialAutism-1.0.jar
nohup screen -d -m -S nonroot java -Xmx6800M -jar /home/javatests/ArtificialAutism-1.0.jar
screen -ls (number1)
screen -X -S (number1) quit
*/
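// ops notes above: grep for the running jar's pid, kill it, relaunch detached in screen,
// then use screen -ls / screen -X -S <id> quit to clean up leftover sessions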
package PresentationLayer;
import DataLayer.settings;
import FunctionLayer.Datahandler;
import FunctionLayer.DoStuff;
import FunctionLayer.PipelineJMWESingleton;
import discord4j.core.DiscordClient;
import discord4j.core.GatewayDiscordClient;
import discord4j.core.event.domain.message.MessageCreateEvent;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.*;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Timer;
import java.util.TimerTask;
import java.util.logging.Level;
import java.util.logging.Logger;
import DataLayer.settings;
import discord4j.common.util.Snowflake;
import discord4j.core.event.domain.message.MessageCreateEvent;
import java.math.BigInteger;
/**
*
* @author install1
*/
public class DiscordHandler {
private static void receiveAndSendPacket(DatagramSocket serverSocket, InetAddress ipAddress, int port,
Datahandler datahandler, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) throws IOException {
byte[] receiveData = new byte[4096];
DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
public static void main(String[] args) {
System.setProperty("java.util.concurrent.ForkJoinPool.common.parallelism", "15");
try {
serverSocket.receive(receivePacket);
} catch (IOException e) {
e.printStackTrace();
Datahandler.instance.initiateMYSQL();
//nohup screen -d -m -S nonroot java -Xmx6900M -jar /home/javatests/ArtificialAutism-1.0.jar
//uncomment db fetch when ready, just keep the comment for future reference
System.out.println("finished initiating MYSQL");
} catch (SQLException | IOException ex) {
Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);
}
String sentence = new String(receivePacket.getData(), 0,
receivePacket.getLength());
sentence = sentence.replace("clientmessage:", "");
String ResponseMsg = datahandler.getResponseMsg(sentence, "", stanfordCoreNLP, stanfordCoreNLPSentiment,
true);
byte[] sendData = ResponseMsg.getBytes("UTF-8");
int deliver_port = 0;
switch (port) {
case 48475:
deliver_port = 48470;
break;
case 48476:
deliver_port = 48471;
break;
case 48477:
deliver_port = 48472;
break;
case 48478:
deliver_port = 48473;
break;
}
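// replies go out on a fixed pairing computed above: 48475->48470, 48476->48471, 48477->48472, 48478->48473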
DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, ipAddress, deliver_port);
serverSocket.send(sendPacket);
}
public static void handleUDPTraffic(int port, Datahandler datahandler,
StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) {
try (DatagramSocket serverSocket = new DatagramSocket(port)) {
String hostIP = "195.154.53.196";
if (port == 48477 || port == 48478) {
hostIP = "51.158.20.245";
}
InetAddress ipAddress = InetAddress.getByName(hostIP); // host IP used for replies
while (true) {
receiveAndSendPacket(serverSocket, ipAddress, port, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
}
} catch (SocketException | UnknownHostException e) {
e.printStackTrace();
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
public static void main(String[] args) throws IOException, SQLException {
Datahandler datahandler = new Datahandler();
datahandler.initiateMYSQL();
PipelineJMWESingleton.getINSTANCE();
StanfordCoreNLP stanfordCoreNLP = datahandler.pipeLineSetUp();
StanfordCoreNLP stanfordCoreNLPSentiment = datahandler.shiftReduceParserInitiate();
Datahandler.instance.instantiateAnnotationMapJMWE();
Datahandler.instance.shiftReduceParserInitiate();
Datahandler.instance.instantiateAnnotationMap();
System.out.println("FINISHED ALL ANNOTATIONS");
datahandler.updateStringCache();
System.out.println("updatedstring cache");
Datahandler.instance.addHLstatsMessages();
Datahandler.instance.updateStringCache();
//String token = "NTI5NzAxNTk5NjAyMjc4NDAx.Dw0vDg.7-aMjVWdQMYPl8qVNyvTCPS5F_A";
String token = new settings().getDiscordToken();
final DiscordClient client = DiscordClient.create(token);
final GatewayDiscordClient gateway = client.login().block();
String usernameBot = gateway.getSelf().block().getUsername();
int autismbotCount = 4;
//make sure not to use ports that are already occupied.
for (int i = 0; i < autismbotCount; i++) {
final int j = i;
new Thread(() -> {
ArrayList<Integer> ports = new ArrayList<Integer>();
ports.add(48475);
ports.add(48476);
ports.add(48477);
ports.add(48478);
handleUDPTraffic(ports.get(j), datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
}).start();
}
new Thread(() -> {
Datahandler.instance.update_autismo_socket_msg();
}).start();
gateway.on(MessageCreateEvent.class).subscribe(event -> {
FunctionLayer.DoStuff.doStuff(event, usernameBot, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
if (!FunctionLayer.DoStuff.isOccupied()) {
FunctionLayer.DoStuff.doStuff(event, usernameBot);
}
});
gateway.onDisconnect().block();
} //3.1.1 discord4j version
}
}

View File

@ -0,0 +1,497 @@
import FunctionLayer.Datahandler;
import FunctionLayer.PipelineJMWESingleton;
import FunctionLayer.StanfordParser.SentimentAnalyzerTest;
import edu.mit.jmwe.data.IMWE;
import edu.mit.jmwe.data.IToken;
import edu.stanford.nlp.ie.AbstractSequenceClassifier;
import edu.stanford.nlp.ie.crf.CRFClassifier;
import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.ling.TaggedWord;
import edu.stanford.nlp.parser.lexparser.LexicalizedParser;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.CoreDocument;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.tagger.maxent.MaxentTagger;
import edu.stanford.nlp.trees.*;
import edu.stanford.nlp.util.CoreMap;
import org.ejml.simple.SimpleMatrix;
import org.junit.Assert;
import org.junit.Test;
import FunctionLayer.SimilarityMatrix;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.FileHandler;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;
public class junit {
private String taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words-distsim.tagger";
private MaxentTagger tagger = new MaxentTagger(taggerPath);
private GrammaticalStructureFactory gsf = initiateGrammaticalStructureFactory();
String nerModel = "edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz";
AbstractSequenceClassifier<CoreLabel> classifier = CRFClassifier.
getClassifierNoExceptions(nerModel);
public GrammaticalStructureFactory initiateGrammaticalStructureFactory() {
String lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz";
LexicalizedParser lp = LexicalizedParser.
loadModel(lexParserEnglishPCFG, "-maxLength", "100");
TreebankLanguagePack langpack = lp.getOp().langpack();
return langpack.grammaticalStructureFactory();
}
public Double testCall(String sent1, String sent2, StanfordCoreNLP stanfordCoreNLP,
StanfordCoreNLP stanfordCoreNLPSentiment) {
System.out.println("\n\n\n\nNEW ITERATION");
System.out.println("sent1: " + sent1);
System.out.println("sent2: " + sent2);
ArrayList<String> concurrentRelations = new ArrayList<String>();
Annotation jmweAnnotationF = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(sent1);
Annotation jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(sent2);
Integer tokenizeCountingF = null;
List<List<TaggedWord>> taggedWordListF = null;
List<List<TaggedWord>> taggedWordList1 = null;
ArrayList<String> retrieveTGWListF = null;
ArrayList<String> retrieveTGWList1 = null;
List<CoreMap> sentencesF = null;
List<CoreMap> sentence1 = null;
List<CoreMap> sentencesSentimentF = null;
List<CoreMap> sentenceSentiment1 = null;
List<CoreMap> coreMaps1 = jmweAnnotationF.get(CoreAnnotations.SentencesAnnotation.class);
ArrayList<Tree> treesF = null;
ArrayList<Tree> trees1 = null;
ArrayList<GrammaticalStructure> grammaticalStructuresF = null;
ArrayList<GrammaticalStructure> grammaticalStructures1 = null;
ArrayList<TypedDependency> typedDependenciesF = null;
ArrayList<Integer> rnnCoreAnnotationsPredictedF = null;
ArrayList<SimpleMatrix> simpleMatricesF = null;
ArrayList<SimpleMatrix> simpleMatricesNodevectorsF = null;
ArrayList<?> listF = null;
Integer longestF = null;
Integer sentimentLongestF = null;
List<IMWE<IToken>> imwesF = null;
Integer InflectedCounterNegativeF = null;
Integer InflectedCounterPositiveF = null;
ArrayList<String> tokenEntryF = null;
Integer MarkedContinuousCounterF = null;
Integer UnmarkedPatternCounterF = null;
ArrayList<String> strTokensIpartFormF = null;
ArrayList<String> tokenFormsF = null;
ArrayList<String> strTokenEntryGetPOSF = null;
ArrayList<Integer> intTokenEntyCountsF = null;
ArrayList<String> ITokenTagsF = null;
ArrayList<String> strTokenStemsF = null;
Integer AnotatorcounterF = null;
Integer TokensCounterF = null;
ArrayList<String> entityTokenTagsF = null;
ArrayList<String> nerEntitiesF = null;
ArrayList<String> nerEntitiesTypeF = null;
ArrayList<String> stopWordTokenF = null;
ArrayList<String> stopWordLemmaF = null;
Integer PairCounterF = null;
ArrayList<TypedDependency> typedDependencies1 = null;
ArrayList<Integer> rnnCoreAnnotationsPredicted1 = null;
ArrayList<SimpleMatrix> simpleMatrices1 = null;
ArrayList<SimpleMatrix> simpleMatricesNodevectors1 = null;
List<?> list1 = null;
Integer longest1 = null;
Integer sentimentLongest1 = null;
List<IMWE<IToken>> imwes1 = null;
Integer InflectedCounterNegative1 = null;
Integer InflectedCounterPositive1 = null;
ArrayList<String> tokenEntry1 = null;
Integer MarkedContinuousCounter1 = null;
Integer UnmarkedPatternCounter1 = null;
ArrayList<String> strTokensIpartForm1 = null;
ArrayList<String> tokenForms1 = null;
ArrayList<String> strTokenEntryGetPOS1 = null;
ArrayList<Integer> intTokenEntyCounts1 = null;
ArrayList<String> ITokenTags1 = null;
ArrayList<String> strTokenStems1 = null;
Integer Anotatorcounter1 = null;
Integer TokensCounter1 = null;
ArrayList<String> entityTokenTags1 = null;
ArrayList<String> nerEntities1 = null;
ArrayList<String> nerEntitiesType1 = null;
ArrayList<String> stopWordToken1 = null;
ArrayList<String> stopWordLemma1 = null;
Integer PairCounter1 = null;
List<CoreMap> coreMaps2 = jmweAnnotation.get(CoreAnnotations.SentencesAnnotation.class);
Annotation strAnno = new Annotation(sent1);
strAnno.compact();
stanfordCoreNLP.annotate(strAnno);
Annotation strAnnoSentiment = new Annotation(sent2);
strAnnoSentiment.compact();
stanfordCoreNLPSentiment.annotate(strAnnoSentiment);
Annotation strAnno2 = new Annotation(sent2);
strAnno2.compact();
stanfordCoreNLP.annotate(strAnno2);
Annotation strAnno22 = new Annotation(sent2);
strAnno22.compact();
stanfordCoreNLPSentiment.annotate(strAnno22);
Annotation annotation = new Annotation(sent1);
stanfordCoreNLP.annotate(annotation);
CoreDocument coreDocument = new CoreDocument(annotation);
annotation = new Annotation(sent2);
stanfordCoreNLP.annotate(annotation);
CoreDocument coreDocument1 = new CoreDocument(annotation);
Integer tokenizeCounting = null;
SentimentAnalyzerTest sentimentAnalyzerTest = new SentimentAnalyzerTest(sent1, sent2,
new SimilarityMatrix(sent1, sent2), coreMaps1, coreMaps2, strAnno,
strAnno2, strAnnoSentiment,
strAnno22, coreDocument,
coreDocument1,
tagger, gsf, classifier, tokenizeCounting, tokenizeCountingF,
taggedWordListF, taggedWordList1, retrieveTGWListF, retrieveTGWList1,
sentencesF, sentence1, sentencesSentimentF, sentenceSentiment1, treesF, trees1,
grammaticalStructuresF, grammaticalStructures1, typedDependenciesF,
typedDependencies1, rnnCoreAnnotationsPredictedF, rnnCoreAnnotationsPredicted1,
simpleMatricesF, simpleMatrices1, simpleMatricesNodevectorsF, simpleMatricesNodevectors1,
listF, list1, longestF, longest1, sentimentLongestF, sentimentLongest1, imwesF,
imwes1, InflectedCounterNegativeF, InflectedCounterNegative1, InflectedCounterPositiveF,
InflectedCounterPositive1, tokenEntryF, tokenEntry1, MarkedContinuousCounterF,
MarkedContinuousCounter1, UnmarkedPatternCounterF, UnmarkedPatternCounter1,
strTokensIpartFormF, strTokensIpartForm1, tokenFormsF, tokenForms1,
strTokenEntryGetPOSF, strTokenEntryGetPOS1, intTokenEntyCountsF,
intTokenEntyCounts1, ITokenTagsF, ITokenTags1, strTokenStemsF, strTokenStems1,
AnotatorcounterF, Anotatorcounter1, TokensCounterF, TokensCounter1,
entityTokenTagsF, entityTokenTags1, nerEntitiesF, nerEntities1, nerEntitiesTypeF,
nerEntitiesType1, stopWordTokenF, stopWordToken1, stopWordLemmaF, stopWordLemma1,
PairCounterF, PairCounter1);
Double score = sentimentAnalyzerTest.callSMX().getDistance();
System.out.println("score: " + score + "\n");
return score;
}
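// observed convention in the assertions below: callSMX() yields a distance-like score
// where larger values mean the two chat lines are judged more strongly related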
//@Test
public void testScoring() {
Datahandler datahandler = new Datahandler();
PipelineJMWESingleton.getINSTANCE();
StanfordCoreNLP stanfordCoreNLP = datahandler.pipeLineSetUp();
StanfordCoreNLP stanfordCoreNLPSentiment = datahandler.shiftReduceParserInitiate();
String sent1 = "I was thinking to small supplies to avoid waiting in the rain. This way, in case of trouble you go home and take in your supply instead of waiting 45 min";
String sent2 = "*NêkØ* Kroaat_West bG <3";
double PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 800.0);
sent2 = "no thanks but i know some ladyboys here that would";
sent1 = "u want head from me :wlenny:";
double PerformTestingFitting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFitting > 200.0);
sent1 = "we need a trim for kroaat's teamwin";
double PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFitting > PerformTestingFittingLess);
sent1 = "i am not a stalker";
sent2 = "but we ban for bhop hack";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
sent2 = "hey stalker";
PerformTestingFitting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < PerformTestingFitting);
sent1 = "what do you think of humanzz";
sent2 = "did we actually go inside rocket -_-";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
sent2 = "crying for beeing tossed for fire";
PerformTestingFitting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFitting > PerformTestingFittingLess);
Assert.assertTrue(PerformTestingFitting > 3000);
sent1 = "admin ! this map needs a Free Torchlight for all";
sent2 = "( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ?";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 100);
sent1 = "i said that because i indeed have more knowledge about medicines than the average joe";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
sent2 = "Depends on the situation but i will mostly trust my doctor if he says this will help and i actually need it";
PerformTestingFitting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFitting > PerformTestingFittingLess);
sent1 = "tell me something";
sent2 = "you learn fast yoshmi";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
sent2 = "when i see europeans dead i laugh";
PerformTestingFitting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFitting > PerformTestingFittingLess);
sent1 = "crobat im gonna nominate next event for you";
sent2 = "why did we sploit . <:wlenny:514861023002624001> <:wlenny:514861023002624001> <:wlenny:514861023002624001>";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
sent2 = "lets go for mako";
PerformTestingFitting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFitting > PerformTestingFittingLess);
sent1 = "how are the calcluations going? any issue with the JMWE?";
sent2 = "anyone know if upgrading damage increases the mines' damage also";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
sent2 = "i have to get back to work";
PerformTestingFitting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFitting > PerformTestingFittingLess);
sent1 = "sprichst du Deutsch?";
sent2 = "like rpggift unknown !!! 130";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 500);
sent1 = "do you like memes?";
sent2 = "we need to adapt to the team we have";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 3400);
sent2 = "i have to get back to work";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 4400);
sent1 = "is that a cursed sentence?";
sent2 = "just didnt want to have heal since i died";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 2000);
sent1 = "my name is ? ? ? ? G ? ? ? but this server doesn't read my special ? ? ? ? ? ? characters";
sent2 = "dont say that sentence again";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 5000);
sent2 = "please dont tell me your gonna repeat that";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 2000);
sent2 = "na it was a good try";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 2000);
sent2 = "NATSU DIES IN THE END";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 2000);
sent1 = "reeee";
sent2 = "??( ? :wlenny~1: ?? ? :wlenny~1: )?? ( ? :wlenny~1: ?? ? :wlenny~1: )/ [ :wlenny~1: ?~ :wlenny~1: :] ? :wlenny~1: ?? ?? <";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 2500);
sent1 = "dw, my mom is a stupid cunt, she deserved it";
sent2 = "(????????????-)---….. JOINT :wlenny~1: !";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 2500);
sent1 = "are you a paste cut or a cut and paste?";
sent2 = "? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 2500);
sent1 = "Did you know that Denmark's short form (DK) is pronounced as \"decay\"? :thonk~1:";
sent2 = "?( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> )??( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> )??( ? <:wlenny:514861023002624001> ?? ?<:wlenny:514861023002624001>)??( ?<:wlenny:514861023002624001>?? ? <:w";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 2500);
sent1 = "are you a space cat or a cat in space? <:thonk:382012909942734858>";
sent2 = "{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 2500);
sent1 = "something else to tell me now";
sent2 = "{fullred}(--)? ?(--? )?{mediumblue}?(--)? ?(--)?{magenta}?(--)?{indigo}?(--? )?";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 2500);
sent1 = "do you have repeating sentences";
sent2 = "its pretty cheap with 10 ppl you pay about 60 euro for a week";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 5500);
sent1 = "what is 60 euro a week";
sent2 = "{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 2500);
sent1 = "do you watch news and if yes which one";
sent2 = "? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?";
PerformTestingFittingLess = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTestingFittingLess < 2500);
sent1 = "\"im gonna bad manner you";
sent2 = "? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 2500.0);
sent1 = "LOOK OUT BIG DOG";
sent2 = "? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 2500.0);
sent1 = "3 days = 30 cents";
sent2 = "? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 2500.0);
sent1 = ":D we had a lot of fun for 2 rounds :D";
sent2 = "? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 2500.0);
sent1 = ">FUCK I JUST PRESSED MY ZTELE BIND";
sent2 = "? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 2500.0);
sent1 = "400 solos on mako <:wlenny:514861023002624001>";
sent2 = "? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 2500.0);
sent1 = "2 noobs 3 not bad";
sent2 = "{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 2500.0);
sent1 = "??????? NOW WE RIOT ???????";
sent2 = "{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 2500.0);
sent1 = "admin turn on bhop pleas";
sent2 = "{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 2500.0);
sent1 = "paranoid is never event";
sent2 = "{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 2500.0);
sent1 = "players keep diying LLLLOOOOLLL";
sent2 = "{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 2500.0);
sent1 = "PRESS THE FUCKING BUTTON IN THE ELEVATOR";
sent2 = "{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 2500.0);
sent1 = "but instead of minecraft server i got css ze";
sent2 = "Rocklee when did you come back from the isis khalifate <:wlenny:514861023002624001>";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 5500.0);
sent1 = "First time there's that many CT at this point";
sent2 = "Rocklee when did you come back from the isis khalifate <:wlenny:514861023002624001>";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 2500.0);
sent1 = "go to spec so changemap";
sent2 = "Rocklee when did you come back from the isis khalifate <:wlenny:514861023002624001>";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 2500.0);
sent1 = "What's for lunch?";
sent2 = "what does bm stand for";
double PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
sent2 = "2 eggs and 1 cup";
double PerformTesting2 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting1 < PerformTesting2);
sent1 = "do you watch any series or animes or cartoons";
sent2 = "you guys voted for this";
PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting1 < 5500);
sent1 = "do you know pyrono";
sent2 = "i have to get accustomed to it";
PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting1 < 2000);
sent1 = "Is William a good admin?";
sent2 = "but this is helms deep";
PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
sent2 = "keep an eye on them";
PerformTesting2 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting2 > PerformTesting1);
sent1 = "scuffed jenz";
sent2 = "I HAVE WATCHED ONLY ONE CARTOON AND IT'S POKEMON";
PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting1 < 2500);
sent1 = "So what?";
PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting1 < 5500);
sent1 = "Who is the enemy?";
PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting1 < 2500);
sent1 = "Sounds bad, doesn't work";
sent2 = "that hold is identical to the other room";
PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting1 < 2500);
sent1 = "oh wait, because I don't have any";
sent2 = "would be cool if VIPs would nominate other than paranoid. All the maps in the vote all the time suck so people just vote for an";
PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting1 < 4500);
sent1 = "{navy}? :wlenny~1: ?? {violet}? :wlenny~1: ? :wlenny~1: ? :wlenny~1: ?? ? :wlenny~1: ? :wlenny~1: ? :wlenny~1: ??";
sent2 = "will you still be online tommorow?";
PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting1 < 4500);
sent1 = "stop being such a kid and act more polite towards people ";
sent2 = "i played nemesis on paradise a few days ago and it worked fine";
PerformTesting1 = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting1 < 6500.0);
sent1 = "Enfin. Map noob";
sent2 = "dagger dagger";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 400.0);
sent1 = "u have to hit the middle one with ur nade";
sent2 = "your not going to mcdonalds, you have had your chance with the cheeseburger";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 1400.0);
sent1 = "How is everyone doing";
sent2 = "wieso ist dein Bein am Arsch. Eigdl hängt das runter";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 400.0);
sent2 = "meshlem how does it feel to be russian";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 700.0);
// new pairs: unrelated messages are capped by low ceilings, related ones must clear floors
sent1 = "they dont buy kefvlar";
sent2 = "you have a database available again";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 400.0);
sent1 = "because of lag?";
sent2 = "french tourit";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 400.0);
sent2 = "Even when I'm working";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 3500.0);
sent1 = "or need another restart";
sent2 = "Even when I'm working";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 600.0);
sent2 = "french tourit";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 400.0);
sent1 = "wow that clock works/";
sent2 = "didnt the bot like mako? what happened to that?";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 400.0);
sent1 = "haHAA stop that cringe chat haHAA";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 400.0);
sent1 = "like 1s down now i guess i will die";
sent2 = "monkaGIGA";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 400.0);
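// Lower bounds: the next pairs are expected to be related, so the score must exceed a floor.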
sent1 = "what do you want";
sent2 = "admun extend";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting > 100.0);
sent1 = "You are a one large bug";
sent2 = "omg you are right";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting > 5900.0);
sent1 = "I'm not a mapper, wtf";
sent2 = "this map was made by wtf";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting > 1400.0);
sent1 = "do you have plants thonk";
sent2 = "banana trees are plants";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting > 1400.0);
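// Back to ceilings: unrelated follow-ups to the same prompt should stay low.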
sent2 = "fucking alcolo";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 600.0);
sent2 = "qual arma e 382012909942734858";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < -400.0);
sent2 = "wlenny on gamebanana";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 2500.0);
sent1 = "And how was it? :wlenny~1:";
sent2 = "at lvl 1 avad is 140 cd";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < 400.0);
sent1 = "wtf? :monkaS~2:";
sent2 = "think thats it kangaroo next";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < -400.0);
sent1 = "yurope";
sent2 = "?? ??????? ??? ??";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < -2400.0);
sent1 = "fuck";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < -2400.0);
}
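
// A minimal sketch, not part of the original tests: named wrappers around the
// bare Assert.assertTrue calls above would make threshold failures self-describing.
// The helper names here are hypothetical.
private static void assertScoreBelow(double score, double ceiling) {
    Assert.assertTrue("score " + score + " should be below " + ceiling, score < ceiling);
}

private static void assertScoreAbove(double score, double floor) {
    Assert.assertTrue("score " + score + " should be above " + floor, score > floor);
}
// Usage, e.g.: assertScoreBelow(PerformTesting, 2500.0);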
}