refactoring the flow, still missing some caching for sentimentAnalyzerTest

This commit is contained in:
christian 2021-08-22 17:10:29 +02:00
parent 5568a1dc75
commit 78532929ae
10 changed files with 1053 additions and 1762 deletions

View File: DataLayer/DataMapper.java

@@ -5,69 +5,39 @@
 */
package DataLayer;
-import FunctionLayer.SimilarityMatrix;
-import FunctionLayer.CustomError;
-import com.google.common.collect.MapMaker;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentMap;
+import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
- *
 * @author install1
 */
public class DataMapper {
-    public static void createTables() throws CustomError {
-        Connection l_cCon = null;
-        PreparedStatement l_pStatement = null;
-        ResultSet l_rsSearch = null;
-        try {
-            l_cCon = DBCPDataSource.getConnection();
-            String l_sSQL = "CREATE TABLE IF NOT EXISTS `ArtificialAutism`.`Sentences` (`Strings` text NOT NULL)";
-            l_pStatement = l_cCon.prepareStatement(l_sSQL);
-            l_pStatement.execute();
-        } catch (SQLException ex) {
-            throw new CustomError("failed in DataMapper " + ex.getMessage());
-        } finally {
-            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
-        }
-    }
-    public static ConcurrentMap<Integer, String> getAllStrings() throws CustomError {
-        ConcurrentMap<Integer, String> allStrings = new MapMaker().concurrencyLevel(2).makeMap();
+    public static ArrayList<String> getAllStrings() throws SQLException {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
+        ArrayList<String> arrayListStr = new ArrayList();
        try {
            l_cCon = DBCPDataSource.getConnection();
            String l_sSQL = "SELECT * FROM `Sentences`";
            l_pStatement = l_cCon.prepareStatement(l_sSQL);
            l_rsSearch = l_pStatement.executeQuery();
-            int ij = 0;
            while (l_rsSearch.next()) {
-                allStrings.put(ij, l_rsSearch.getString(1));
-                ij++;
+                arrayListStr.add(l_rsSearch.getString(1));
            }
-        } catch (SQLException ex) {
-            throw new CustomError("failed in DataMapper " + ex.getMessage());
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
-        return allStrings;
+        return arrayListStr;
    }
-    public static void InsertMYSQLStrings(ConcurrentMap<Integer, String> str) throws CustomError {
+    public static void InsertMYSQLStrings(ArrayList<String> str) throws SQLException {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
@@ -75,14 +45,10 @@ public class DataMapper {
        try {
            l_cCon = DBCPDataSource.getConnection();
            l_pStatement = l_cCon.prepareStatement(l_sSQL);
-            for (String str1 : str.values()) {
-                //System.out.println("adding str1: " + str1 + "\n");
+            for (String str1 : str) {
                l_pStatement.setString(1, str1);
-                l_pStatement.addBatch();
+                l_pStatement.execute();
            }
-            l_pStatement.executeBatch();
-        } catch (SQLException ex) {
-            throw new CustomError("failed in DataMapper " + ex.getMessage());
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
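The refactored InsertMYSQLStrings executes one statement per string, where the old code queued rows with addBatch() and flushed them once with executeBatch(), so the new version pays one database round-trip per sentence. A minimal Kotlin sketch of the batched variant, assuming the DBCPDataSource helper from this diff and the `Sentences`(`Strings`) table created by the old createTables(); the function name and the exact INSERT statement are assumptions:

    import java.sql.SQLException

    // Sketch only: batched insert against the `Sentences` table from this diff.
    // addBatch() queues rows client-side; executeBatch() sends them in one go.
    @Throws(SQLException::class)
    fun insertStringsBatched(strings: List<String>) {
        DBCPDataSource.getConnection().use { con ->
            con.prepareStatement("INSERT INTO `Sentences` (`Strings`) VALUES (?)").use { stmt ->
                for (s in strings) {
                    stmt.setString(1, s)
                    stmt.addBatch()
                }
                stmt.executeBatch()
            }
        }
    }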

View File: FunctionLayer/CustomError.java (deleted)

@@ -1,17 +0,0 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package FunctionLayer;
/**
*
* @author install1
*/
public class CustomError extends Exception {
public CustomError(String msg) {
super(msg);
}
}

View File: FunctionLayer/Datahandler.kt

@@ -7,32 +7,29 @@ package FunctionLayer
import DataLayer.DataMapper
import FunctionLayer.StanfordParser.SentimentAnalyzerTest
-import FunctionLayer.StanfordParser.SentimentValueCache
import com.google.common.base.Stopwatch
-import com.google.common.collect.MapMaker
import edu.stanford.nlp.ie.AbstractSequenceClassifier
import edu.stanford.nlp.ie.crf.CRFClassifier
+import edu.stanford.nlp.ling.CoreAnnotations
import edu.stanford.nlp.ling.CoreLabel
+import edu.stanford.nlp.ling.TaggedWord
import edu.stanford.nlp.parser.lexparser.LexicalizedParser
import edu.stanford.nlp.pipeline.Annotation
import edu.stanford.nlp.pipeline.CoreDocument
import edu.stanford.nlp.pipeline.StanfordCoreNLP
import edu.stanford.nlp.tagger.maxent.MaxentTagger
import edu.stanford.nlp.trees.GrammaticalStructureFactory
-import edu.stanford.nlp.trees.TreebankLanguagePack
-import kotlinx.coroutines.*
-import java.io.IOException
-import java.io.UnsupportedEncodingException
-import java.net.*
-import java.sql.SQLException
+import edu.stanford.nlp.trees.Tree
+import edu.stanford.nlp.util.CoreMap
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.launch
+import kotlinx.coroutines.runBlocking
+import kotlinx.coroutines.yield
import java.util.*
-import java.util.concurrent.ConcurrentMap
-import java.util.concurrent.CountDownLatch
import java.util.concurrent.TimeUnit
-import java.util.function.Consumer
-import java.util.logging.Level
-import java.util.logging.Logger
+import java.util.regex.Pattern
import kotlin.collections.ArrayList
-import kotlin.collections.HashMap
/**
@@ -41,338 +38,266 @@ import kotlin.collections.ArrayList
 */
public class Datahandler {
    private val stopwatch: Stopwatch
-    fun shiftReduceParserInitiate() = runBlocking {
-        val cdl = CountDownLatch(2)
-        coroutineScope {
-            val job = launch(Dispatchers.Default) {
-                propsSentiment.setProperty("parse.model", lexParserEnglishRNN)
-                propsSentiment.setProperty("sentiment.model", sentimentModel)
-                propsSentiment.setProperty("parse.maxlen", "90")
-                propsSentiment.setProperty("threads", "5")
-                propsSentiment.setProperty("pos.maxlen", "90")
-                propsSentiment.setProperty("tokenize.maxlen", "90")
-                propsSentiment.setProperty("ssplit.maxlen", "90")
-                propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,sentiment,lemma,stopword") //coref too expensive memorywise
-                propsSentiment.setProperty("customAnnotatorClass.stopword", "FunctionLayer.StopwordAnnotator")
-                propsSentiment.setProperty(StopwordAnnotator.STOPWORDS_LIST, customStopWordList)
-                propsSentiment.setProperty("tokenize.options", "untokenizable=firstKeep")
-                pipelineSentiment = StanfordCoreNLP(propsSentiment)
-                tagger = MaxentTagger(taggerPath)
-                lp = LexicalizedParser.loadModel(lexParserEnglishRNN, *options)
-                tlp = lp.getOp().langpack()
-                gsf = tlp.grammaticalStructureFactory()
-                cdl.countDown()
-                yield()
-            }
-            job.join()
-        }
-        coroutineScope {
-            val job = launch(Dispatchers.Default) {
-                try {
-                    classifier = CRFClassifier.getClassifierNoExceptions(nerModel)
-                } catch (ex: ClassCastException) {
-                    Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
-                }
-                cdl.countDown()
-                yield()
-            }
-            job.join()
-        }
-        try {
-            cdl.await()
-        } catch (ex: InterruptedException) {
-            //System.out.println("cdl await interrupted: " + ex.getLocalizedMessage() + "\n");
-        }
+    private val EXPIRE_TIME_IN_MINUTES = TimeUnit.MINUTES.convert(30, TimeUnit.MINUTES)
+    private var pipelineAnnotationCache: HashMap<String, Annotation>
+    private var pipelineSentimentAnnotationCache = HashMap<String, Annotation>()
+    private var coreDocumentAnnotationCache: HashMap<String, CoreDocument>
+    private var jmweAnnotationCache = HashMap<String, Annotation>()
+    private val stringCache = ArrayList<String>()
+    private val nerModel = "edu/stanford/nlp/models/ner/english.all.3class.caseless.distsim.crf.ser.gz"
+    private var tagger: MaxentTagger = MaxentTagger()
+    private var gsf: GrammaticalStructureFactory
+    private var classifier: AbstractSequenceClassifier<CoreLabel>
+    //SentimentAnalyzer Hashmaps
+    private var tokenizeCountingHashMap: HashMap<String, Int> = HashMap()
+    private var taggedWordListHashMap: HashMap<String, List<List<TaggedWord>>> = HashMap()
+    private var retrieveTGWListHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
+    private var sentences1HashMap: HashMap<String, List<CoreMap>> = HashMap()
+    private var sentencesSentimentHashMap: HashMap<String, List<CoreMap>> = HashMap()
+    private var trees1HashMap: HashMap<String, java.util.ArrayList<Tree>> = HashMap()
+    constructor() {
+        stopwatch = Stopwatch.createUnstarted()
+        jmweAnnotationCache = HashMap<String, Annotation>()
+        pipelineAnnotationCache = HashMap<String, Annotation>()
+        pipelineSentimentAnnotationCache = HashMap<String, Annotation>()
+        coreDocumentAnnotationCache = HashMap<String, CoreDocument>()
+        gsf = initiateGrammaticalStructureFactory()
+        classifier = CRFClassifier.getClassifierNoExceptions(nerModel)
+    }
+    fun initiateGrammaticalStructureFactory(): GrammaticalStructureFactory {
+        val options = arrayOf("-maxLength", "100")
+        val lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz"
+        val lp = LexicalizedParser.loadModel(lexParserEnglishRNN, *options)
+        val tlp = lp.getOp().langpack()
+        return tlp.grammaticalStructureFactory()
+    }
+    public fun pipeLineSetUp(): StanfordCoreNLP {
+        val props = Properties()
+        val shiftReduceParserPath = "edu/stanford/nlp/models/srparser/englishSR.ser.gz"
+        val nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.caseless.distsim.crf.ser.gz"
+        val nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.caseless.distsim.crf.ser.gz"
+        props.setProperty("annotators", "tokenize,ssplit,pos,lemma,ner,parse")
+        props.setProperty("parse.model", shiftReduceParserPath)
+        props.setProperty("parse.maxlen", "90")
+        props.setProperty("parse.binaryTrees", "true")
+        props.setProperty("threads", "5")
+        props.setProperty("pos.maxlen", "90")
+        props.setProperty("tokenize.maxlen", "90")
+        props.setProperty("ssplit.maxlen", "90")
+        props.setProperty("lemma.maxlen", "90")
+        props.setProperty("ner.model", "$nerModel,$nerModel2,$nerModel3")
+        props.setProperty("ner.combinationMode", "HIGH_RECALL")
+        props.setProperty("regexner.ignorecase", "true")
+        props.setProperty("ner.fine.regexner.ignorecase", "true")
+        props.setProperty("tokenize.options", "untokenizable=firstKeep")
+        return StanfordCoreNLP(props)
+    }
+    fun shiftReduceParserInitiate(): StanfordCoreNLP {
+        val propsSentiment = Properties()
+        val lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz"
+        val sentimentModel = "edu/stanford/nlp/models/sentiment/sentiment.ser.gz"
+        val taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger"
+        val customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for,if,in,into,is,it,no,not,of," +
+                "on,or,such,that,the,their,then,there,these,they,this,to,was,will,with"
+        propsSentiment.setProperty("parse.model", lexParserEnglishRNN)
+        propsSentiment.setProperty("sentiment.model", sentimentModel)
+        propsSentiment.setProperty("parse.maxlen", "90")
+        propsSentiment.setProperty("threads", "5")
+        propsSentiment.setProperty("pos.maxlen", "90")
+        propsSentiment.setProperty("tokenize.maxlen", "90")
+        propsSentiment.setProperty("ssplit.maxlen", "90")
+        propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,sentiment,lemma,stopword") //coref too expensive memorywise
+        propsSentiment.setProperty("customAnnotatorClass.stopword", "FunctionLayer.StopwordAnnotator")
+        propsSentiment.setProperty(StopwordAnnotator.STOPWORDS_LIST, customStopWordList)
+        propsSentiment.setProperty("tokenize.options", "untokenizable=firstKeep")
+        tagger = MaxentTagger(taggerPath)
        println("finished shiftReduceParserInitiate\n")
+        return StanfordCoreNLP(propsSentiment)
    }
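Both CoreNLP pipelines are now plain return values instead of static singletons inside Datahandler, so the caller owns and reuses them. A hedged construction sketch using only names from this diff:

    // Assumed start-up wiring: build the handler once, then both pipelines,
    // and pass them into every call that needs annotation.
    val datahandler = Datahandler()
    val stanfordCoreNLP: StanfordCoreNLP = datahandler.pipeLineSetUp()
    val stanfordCoreNLPSentiment: StanfordCoreNLP = datahandler.shiftReduceParserInitiate()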
    fun updateStringCache() {
-        try {
-            checkIfUpdateStrings()
-        } catch (ex: CustomError) {
-            Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
-        }
-    }
-    @get:Throws(SQLException::class, IOException::class, CustomError::class)
-    private val cache: Map<Int, String?>
-        private get() = DataMapper.getAllStrings()
-    @Throws(SQLException::class, IOException::class)
-    fun initiateMYSQL() {
-        try {
-            DataMapper.createTables()
-            stringCache.putAll(cache)
-        } catch (ex: CustomError) {
-            Logger.getLogger(Datahandler::class.java
-                    .name).log(Level.SEVERE, null, ex)
-        }
-    }
-    fun instantiateAnnotationMapJMWE() {
-        if (!stringCache.isEmpty()) {
-            val jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(stringCache.values)
-            for ((key, value) in jmweAnnotation) {
-                jmweAnnotationCache[key] = value
-            }
-        }
-    }
-    fun instantiateAnnotationMap() = runBlocking {
-        if (!stringCache.isEmpty()) {
-            val Annotationspipeline = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
-            val AnnotationspipelineSentiment = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
-            coroutineScope {
-                for (str in stringCache.values) {
-                    val job = launch(Dispatchers.Default) {
-                        val strAnno = Annotation(str)
-                        strAnno.compact()
-                        Annotationspipeline[str] = strAnno
-                        val strAnno2 = Annotation(str)
-                        strAnno2.compact()
-                        AnnotationspipelineSentiment[str] = strAnno2
-                        yield()
-                    }
-                    job.join();
-                }
-            }
-            System.out.println("PRE getMultipleCoreDocumentsWaySuggestion lag")
-            val coreDocumentpipelineMap = getMultipleCoreDocumentsWaySuggestion(stringCache.values, pipeline)
-            //System.out.println("post getMultipleCoreDocumentsWaySuggestion instantiateAnnotationMap lag")
-            pipeline.annotate(Annotationspipeline.values, 4)
-            pipelineSentiment!!.annotate(AnnotationspipelineSentiment.values, 4)
-            //System.out.println("reached second job instantiateAnnotationMap lag");
-            coroutineScope {
-                for (i in Annotationspipeline.entries) {
-                    val job = launch(Dispatchers.Default) {
-                        i.value.compact()
-                        pipelineAnnotationCache[i.key] = i.value
-                        yield()
-                    }
-                    job.join();
-                }
-                for (i in AnnotationspipelineSentiment.entries) {
-                    val job = launch(Dispatchers.Default) {
-                        i.value.compact()
-                        pipelineSentimentAnnotationCache[i.key] = i.value
-                        yield()
-                    }
-                    job.join();
-                }
-            }
-            System.out.println("post Annotationspipeline lag")
-            for (i in coreDocumentpipelineMap.entries) {
-                coreDocumentAnnotationCache[i.key] = i.value
-            }
-        }
-    }
-    private fun futuresReturnOverallEvaluation(similarityMatrixes: List<SimilarityMatrix?>): ConcurrentMap<Int?, String?> {
-        var strmapreturn = MapMaker().concurrencyLevel(6).makeMap<Int?, String?>()
-        if (!similarityMatrixes.isEmpty()) {
-            for (SMX in similarityMatrixes) {
-                strmapreturn = addSMXToMapReturn(strmapreturn, SMX)
-            }
-        }
-        return strmapreturn
-    }
-    private fun addSMXToMapReturn(strmapreturn: ConcurrentMap<Int?, String?>, SMX: SimilarityMatrix?): ConcurrentMap<Int?, String?> {
-        if (!strmapreturn.containsValue(SMX!!.primaryString)) {
-            strmapreturn[strmapreturn.size] = SMX.primaryString
-            val transmittedStr = SMX.secondaryString
-            val cacheValue1 = SMX.cacheValue1
-            val cacheValue2 = SMX.cacheValue2
-            if (cacheValue1 != null && !sentimentCachingMap.keys.contains(SMX.primaryString)) {
-                sentimentCachingMap[SMX.secondaryString] = SMX.cacheValue1
-            }
-            if (cacheValue2 != null && !sentimentCachingMap.keys.contains(transmittedStr)) {
-                sentimentCachingMap[transmittedStr] = SMX.cacheValue2
-            }
-        }
-        return strmapreturn
-    }
-    private fun checkForNullValues(index: String?): Boolean {
-        if (jmweAnnotationCache[index] != null && pipelineAnnotationCache[index] != null
-                && pipelineSentimentAnnotationCache[index] != null &&
-                coreDocumentAnnotationCache[index] != null) {
-            return true;
-        }
-        return false;
-    }
-    private suspend fun StrComparringNoSentenceRelationMap(strCacheLocal: ConcurrentMap<Int, String?>, strCollection: Collection<String?>, localJMWEMap: ConcurrentMap<String, Annotation>,
-                                                           localPipelineAnnotation: ConcurrentMap<String?, Annotation>, localPipelineSentimentAnnotation: ConcurrentMap<String?, Annotation>,
-                                                           localCoreDocumentMap: ConcurrentMap<String, CoreDocument>): List<SimilarityMatrix?> {
-        val distance_requirement = 10500.0
-        val prefix_size = 150
-        val smxReturnList: ArrayList<SimilarityMatrix> = ArrayList<SimilarityMatrix>()
-        coroutineScope {
-            for (j in strCollection) {
-                val job = launch(Dispatchers.Default) {
-                    for (i in strCollection) {
-                        if (j != i) {
-                            val SMXInit = SimilarityMatrix(j, i)
-                            val sentimentCacheStr1 = sentimentCachingMap.getOrDefault(i, null)
-                            val sentimentCacheStr = sentimentCachingMap.getOrDefault(j, null)
-                            var sentimentAnalyzerTest: SentimentAnalyzerTest? = null
-                            val checkedVal: Boolean = checkForNullValues(i)
-                            if (stringCache.size < prefix_size || !checkedVal) {
-                                sentimentAnalyzerTest = SentimentAnalyzerTest(j, i, SMXInit,
-                                        localJMWEMap[j], localJMWEMap[i], localPipelineAnnotation[j],
-                                        localPipelineAnnotation[i], localPipelineSentimentAnnotation[j],
-                                        localPipelineSentimentAnnotation[i], localCoreDocumentMap[j], localCoreDocumentMap[i],
-                                        sentimentCacheStr, sentimentCacheStr1)
-                            } else {
-                                sentimentAnalyzerTest = SentimentAnalyzerTest(j, i, SMXInit,
-                                        localJMWEMap[j], jmweAnnotationCache[i], localPipelineAnnotation[j],
-                                        pipelineAnnotationCache[i], localPipelineSentimentAnnotation[j],
-                                        pipelineSentimentAnnotationCache[i], localCoreDocumentMap[j],
-                                        coreDocumentAnnotationCache[i], sentimentCacheStr, sentimentCacheStr1)
-                            }
-                            val call = sentimentAnalyzerTest.callSMX();
-                            if (call != null && call.distance > distance_requirement) {
-                                smxReturnList.add(call)
-                            }
-                        }
-                    }
-                    yield()
-                }
-                job.join()
-            }
-        }
-        return smxReturnList
-    }
-    private suspend fun stringIteratorComparator(strmap: ConcurrentMap<Int?, String?>,
-                                                 strCacheLocal: ConcurrentMap<Int, String?>, localJMWEMap: ConcurrentMap<String, Annotation>,
-                                                 localPipelineAnnotation: ConcurrentMap<String?, Annotation>, localPipelineSentimentAnnotation: ConcurrentMap<String?, Annotation>,
-                                                 localCoreDocumentMap: ConcurrentMap<String, CoreDocument>): ConcurrentMap<Int?, String?> {
-        //System.out.println("strmap siuze: " + strmap.size());
-        val ComparringNoSentenceRelationMap: List<SimilarityMatrix> = StrComparringNoSentenceRelationMap(strCacheLocal, strmap.values,
-                localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap) as List<SimilarityMatrix>
-        Collections.sort(ComparringNoSentenceRelationMap, Comparator<SimilarityMatrix> { e1: SimilarityMatrix, e2: SimilarityMatrix -> e1.primaryString.compareTo(e2.primaryString) })
-        System.out.println("ComparringNoSentenceRelationMap size: " + ComparringNoSentenceRelationMap.size);
-        return futuresReturnOverallEvaluation(ComparringNoSentenceRelationMap)
-    }
-    private suspend fun removeNonSensicalStrings(strmap: ConcurrentMap<Int?, String?>): ConcurrentMap<Int?, String?> {
-        val strCacheLocal = stringCache
-        val localJMWEMap = getMultipleJMWEAnnotation(strmap.values)
-        val localPipelineAnnotation = getMultiplePipelineAnnotation(strmap.values)
-        System.out.println("str size post getMultiplePipelineAnnotation: " + strmap.size)
-        val localPipelineSentimentAnnotation = getMultiplePipelineSentimentAnnotation(strmap.values)
-        val localCoreDocumentMap = getMultipleCoreDocumentsWaySuggestion(strmap.values, pipeline)
-        System.out.println("strmap size pre stringIteratorComparator: " + strmap.size)
-        return stringIteratorComparator(strmap, strCacheLocal, localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap)
-    }
-    fun checkIfUpdateStrings() = runBlocking {
-        if (stopwatch.elapsed(TimeUnit.SECONDS) >= EXPIRE_TIME_IN_SECONDS || !stopwatch.isRunning) {
-            var str = MessageResponseHandler.getStr()
-            println("str size: " + str.size)
-            str = filterContent(str)
-            System.out.println("str size post filtercontent: " + str.size)
-            str = removeNonSensicalStrings(str)
-            System.out.println("removeNonSensicalStrings str size POST: " + str.size + "\n");
-            str = annotationCacheUpdate(str)
-            println("""
-                annotationCacheUpdate str size POST: ${str.size}
-                """.trimIndent())
-            val strf = str
-            if (!stringCache.isEmpty()) {
-                coroutineScope {
-                    val job = launch(Dispatchers.IO) {
-                        try {
-                            DataMapper.InsertMYSQLStrings(strf)
-                        } catch (ex: CustomError) {
-                            Logger.getLogger(Datahandler::class.java
-                                    .name).log(Level.SEVERE, null, ex)
-                        }
-                        MessageResponseHandler.setStr(MapMaker().concurrencyLevel(6).makeMap())
-                        yield()
-                    }
-                    job.join()
-                }
-            } else {
-                try {
-                    DataMapper.InsertMYSQLStrings(strf)
-                } catch (ex: CustomError) {
-                    Logger.getLogger(Datahandler::class.java
-                            .name).log(Level.SEVERE, null, ex)
-                }
-                MessageResponseHandler.setStr(MapMaker().concurrencyLevel(6).makeMap())
-            }
+        if (stopwatch.elapsed(TimeUnit.MINUTES) >= EXPIRE_TIME_IN_MINUTES || !stopwatch.isRunning) {
            if (!stopwatch.isRunning) {
                stopwatch.start()
            } else {
                stopwatch.reset()
            }
+            stringCache.sortWith(Comparator.comparingInt(String::length).reversed());
+            System.out.println("pre InsertMYSQLStrings")
+            DataMapper.InsertMYSQLStrings(stringCache)
        }
    }
+    fun initiateMYSQL() {
+        stringCache.addAll(DataMapper.getAllStrings())
+    }
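updateStringCache() now gates itself on a Guava Stopwatch instead of the removed checkIfUpdateStrings() chain: the body runs only when the watch has never been started or the elapsed time has reached EXPIRE_TIME_IN_MINUTES (note that TimeUnit.MINUTES.convert(30, TimeUnit.MINUTES) is simply 30). The gate in isolation, as a small sketch:

    import com.google.common.base.Stopwatch
    import java.util.concurrent.TimeUnit

    // Sketch of the refresh gate above: act at most once per expiry window.
    fun shouldRefresh(stopwatch: Stopwatch, expireMinutes: Long): Boolean =
        !stopwatch.isRunning || stopwatch.elapsed(TimeUnit.MINUTES) >= expireMinutes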
    private fun trimString(str: String): String {
-        var str = str
-        str = str.trim { it <= ' ' }
-        if (str.startsWith("<@")) {
-            str = str.substring(str.indexOf("> ") + 2)
+        var message = str.trim { it <= ' ' }
+        if (message.startsWith("<@")) {
+            message = message.substring(message.indexOf("> ") + 2)
        }
-        return str
+        if (!message.isEmpty()) {
+            message = message.replace("@", "")
+            if (message.contains("<>")) {
+                message = message.substring(message.indexOf(">"))
+            }
+            if (message.startsWith("[ *")) {
+                message = message.substring(message.indexOf("]"))
+            }
+        }
+        return message
    }
-    private suspend fun getResponseFutures(strF: String): String {
-        val values_copy: List<String?> = ArrayList(stringCache.values)
-        Collections.sort<String>(values_copy) { o1, o2 -> o2.length - o1.length }
-        var preRelationUserCounters = -155000.0
-        val concurrentRelations: MutableList<String?> = arrayListOf()
-        val SB = StringBuilder()
-        coroutineScope {
-            for (str1 in values_copy) {
-                if (strF != str1) {
-                    val job = launch(Dispatchers.Default) {
-                        var sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null)
-                        var sentimentAnalyzerTest = SentimentAnalyzerTest(strF, str1, SimilarityMatrix(strF, str1),
-                                strAnnoJMWE, jmweAnnotationCache[str1], strAnno,
-                                pipelineAnnotationCache[str1], strAnnoSentiment,
-                                pipelineSentimentAnnotationCache[str1], coreDoc, coreDocumentAnnotationCache[str1],
-                                null, sentimentCacheStr1)
-                        var getSMX: SimilarityMatrix = sentimentAnalyzerTest.callSMX()
-                        if (getSMX != null) {
-                            val scoreRelationLastUserMsg = getSMX.distance
-                            if (scoreRelationLastUserMsg > preRelationUserCounters) {
-                                preRelationUserCounters = scoreRelationLastUserMsg
-                                concurrentRelations.add(getSMX.secondaryString)
-                            }
-                        }
-                        yield()
-                    }
-                    job.join()
-                }
-            }
-        }
-        val randomLenghtPermit = strF.length * (Math.random() * Math.random() * Math.random() * (Math.random() * 10))
-        Collections.reverse(concurrentRelations)
-        if (!concurrentRelations.isEmpty()) {
-            val firstRelation = concurrentRelations[0]
-            val job1 = launch(Dispatchers.Default) {
-                for (secondaryRelation in concurrentRelations) {
-                    if (SB.toString().length > randomLenghtPermit && !SB.toString().isEmpty()) {
-                        break
-                    }
-                    val append = appendToString(firstRelation, secondaryRelation)
-                    if (append) {
-                        SB.append(secondaryRelation).append(" ")
-                    }
-                }
-                yield()
-            }
-            job1.join()
-        }
+    private fun createStrAnnotation(str: String, stanfordCoreNLP: StanfordCoreNLP, sentimentBool: Boolean) {
+        val strAnno2 = Annotation(str)
+        strAnno2.compact()
+        stanfordCoreNLP.annotate(strAnno2)
+        if (sentimentBool) {
+            pipelineSentimentAnnotationCache.put(str, strAnno2)
+        } else {
+            pipelineAnnotationCache.put(str, strAnno2)
+        }
+    }
+    private fun getResponseFutures(strF: String, stanfordCoreNLP: StanfordCoreNLP, stanfordCoreNLPSentiment: StanfordCoreNLP): String {
+        val strAnno: Annotation = Annotation(strF)
+        strAnno.compact()
+        stanfordCoreNLP.annotate(strAnno)
+        val strAnnoSentiment: Annotation = Annotation(strF)
+        strAnnoSentiment.compact()
+        stanfordCoreNLPSentiment.annotate(strAnnoSentiment)
+        val coreDocument = CoreDocument(strF)
+        stanfordCoreNLP.annotate(coreDocument)
+        val values_copy: List<String> = ArrayList(stringCache)
+        var preRelationUserCounters = -155000.0
+        val concurrentRelations: MutableList<String> = arrayListOf()
+        val SB = StringBuilder()
+        var jmweAnnotationF = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strF)
+        var tokenizeCountingF: Int? = null
+        var taggedWordListF: List<List<TaggedWord>>? = null
+        var retrieveTGWListF: java.util.ArrayList<String>? = null
+        var sentencesF: List<CoreMap>? = null
+        var sentencesSentimentF: List<CoreMap>? = null
+        var coreMaps1: List<CoreMap> = jmweAnnotationF.get(CoreAnnotations.SentencesAnnotation::class.java)
+        var treesF: java.util.ArrayList<Tree>? = null
+        for (str1 in values_copy) {
+            if (strF != str1) {
+                val annotation2 = pipelineSentimentAnnotationCache.getOrDefault(str1, null)
+                val annotation4 = pipelineAnnotationCache.getOrDefault(str1, null)
+                val coreDocument1 = coreDocumentAnnotationCache.getOrDefault(str1, null)
+                var jmweAnnotation = jmweAnnotationCache.getOrDefault(str1, null)
+                if (annotation2 == null) {
+                    createStrAnnotation(str1, stanfordCoreNLPSentiment, true)
+                }
+                if (annotation4 == null) {
+                    createStrAnnotation(str1, stanfordCoreNLP, false)
+                }
+                if (coreDocument1 == null) {
+                    getCoreDocumentsSuggested(stanfordCoreNLP, str1)
+                }
+                if (jmweAnnotation == null) {
+                    getJMWEAnnotation(str1)
+                    jmweAnnotation = jmweAnnotationCache.get(str1)
+                }
+                val tokenizeCounting: Int? = tokenizeCountingHashMap.getOrDefault(str1, null)
+                val taggedWordList1: List<List<TaggedWord>>? = taggedWordListHashMap.getOrDefault(str1, null)
+                val retrieveTGWList1: java.util.ArrayList<String>? = retrieveTGWListHashMap.getOrDefault(str1, null)
+                val sentence1: List<CoreMap>? = sentences1HashMap.getOrDefault(str1, null)
+                val sentenceSentiment1: List<CoreMap>? = sentencesSentimentHashMap.getOrDefault(str1, null)
+                val trees1 = trees1HashMap.getOrDefault(str1, null)
+                var coreMaps2: List<CoreMap> = listOf()
+                if (jmweAnnotation != null) {
+                    coreMaps2 = jmweAnnotation.get(CoreAnnotations.SentencesAnnotation::class.java)
+                }
+                var SMX = SentimentAnalyzerTest(strF, str1, SimilarityMatrix(strF, str1),
+                        coreMaps1, coreMaps2, strAnno,
+                        pipelineAnnotationCache[str1], strAnnoSentiment,
+                        pipelineSentimentAnnotationCache[str1], coreDocument, coreDocumentAnnotationCache[str1],
+                        tagger, gsf, classifier, tokenizeCounting, tokenizeCountingF,
+                        taggedWordListF, taggedWordList1, retrieveTGWListF, retrieveTGWList1,
+                        sentencesF, sentence1, sentencesSentimentF, sentenceSentiment1, treesF, trees1)
+                if (tokenizeCounting == null) {
+                    val tokenizeCounting1 = SMX.getTokenizeCounting();
+                    tokenizeCountingHashMap.put(str1, tokenizeCounting1)
+                }
+                if (taggedWordList1 == null) {
+                    val taggedWordList1Local = SMX.getTaggedWordList1();
+                    taggedWordListHashMap.put(str1, taggedWordList1Local)
+                }
+                if (tokenizeCountingF == null) {
+                    val tokenizeCountingF1 = SMX.getTokenizeCountingF();
+                    tokenizeCountingF = tokenizeCountingF1;
+                }
+                if (taggedWordListF == null) {
+                    val taggedWordListF1 = SMX.getTaggedWordListF();
+                    taggedWordListF = taggedWordListF1;
+                }
+                if (retrieveTGWListF == null) {
+                    val retrieveTGWListF1 = SMX.getRetrieveTGWListF();
+                    retrieveTGWListF = retrieveTGWListF1;
+                }
+                if (retrieveTGWList1 == null) {
+                    val retrieveTGWList11 = SMX.getRetrieveTGWList1();
+                    retrieveTGWListHashMap.put(str1, retrieveTGWList11);
+                }
+                if (sentencesF == null) {
+                    val sentencesF1 = SMX.getSentencesF();
+                    sentencesF = sentencesF1;
+                }
+                if (sentence1 == null) {
+                    val sentences1 = SMX.getSentences1();
+                    sentences1HashMap.put(str1, sentences1)
+                }
+                if (sentencesSentimentF == null) {
+                    val sentencesSentimentF1 = SMX.getSentencesSentimentF();
+                    sentencesSentimentF = sentencesSentimentF1;
+                }
+                if (sentenceSentiment1 == null) {
+                    val sentencesSentiment1 = SMX.getSentencesSentiment1();
+                    sentencesSentimentHashMap.put(str1, sentencesSentiment1);
+                }
+                if (treesF == null) {
+                    val treesF1 = SMX.getTreesF();
+                    treesF = treesF1;
+                }
+                if (trees1 == null) {
+                    val trees11 = SMX.getTrees1();
+                    trees1HashMap.put(str1, trees11)
+                }
+                var getSMX: SimilarityMatrix = SMX.callSMX()
+                val scoreRelationLastUserMsg = getSMX.distance
+                if (scoreRelationLastUserMsg > preRelationUserCounters) {
+                    preRelationUserCounters = scoreRelationLastUserMsg
+                    concurrentRelations.add(getSMX.secondaryString)
+                }
+            }
+        }
+        val cacheRequirement = 6500;
+        if (preRelationUserCounters > cacheRequirement && !stringCache.contains(strF) && filterContent(strF)) {
+            stringCache.add(strF)
+        }
+        val randomLenghtPermit = strF.length * (Math.random() * Math.random() * Math.random() * (Math.random() * 10))
+        Collections.reverse(concurrentRelations)
+        if (!concurrentRelations.isEmpty()) {
+            for (secondaryRelation in concurrentRelations) {
+                if (SB.toString().length > randomLenghtPermit && !SB.toString().isEmpty()) {
+                    break
+                }
+                SB.append(secondaryRelation).append(" ")
+            }
+        }
        if (SB.toString().isEmpty()) {
@@ -381,299 +306,79 @@
        return SB.toString()
    }
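This block is the caching the commit message calls incomplete: each per-string artifact (tokenize counts, tagged-word lists, sentences, trees) is looked up, computed through SentimentAnalyzerTest when absent, and written back to its HashMap. Kotlin's getOrPut expresses one such null-check-then-put pair directly; the compute parameter below is a hypothetical stand-in for the corresponding SMX getter:

    // Hypothetical condensation of the per-string cache blocks above.
    val tokenizeCountingHashMap = HashMap<String, Int>()

    fun tokenizeCountFor(str: String, compute: (String) -> Int): Int =
        tokenizeCountingHashMap.getOrPut(str) { compute(str) }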
-    private fun appendToString(firstRelation: String?, secondaryRelation: String?): Boolean {
-        if (firstRelation == secondaryRelation) {
-            return true
-        }
-        val scoreRelationStrF = getScoreRelationStrF(firstRelation, secondaryRelation)
-        return if (scoreRelationStrF > 1900) {
-            true
-        } else false
+    private fun getJMWEAnnotation(str1: String) {
+        val jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(str1)
+        jmweAnnotationCache.put(str1, jmweAnnotation)
    }
-    fun getResponseMsg(str: String): String {
-        val responseFutures: String
+    fun getResponseMsg(str: String, personName: String, stanfordCoreNLP: StanfordCoreNLP,
+                       stanfordCoreNLPSentiment: StanfordCoreNLP, ingameResponse: Boolean): String {
+        var responseFutures: String = ""
        runBlocking {
-            val strF = trimString(str)
-            getSingularAnnotation(strF)
-            responseFutures = getResponseFutures(strF)
+            val launch1 = launch(Dispatchers.Default) {
+                var strF = trimString(str)
+                responseFutures = getResponseFutures(strF, stanfordCoreNLP, stanfordCoreNLPSentiment)
+                if (!ingameResponse) {
+                    responseFutures = checkPersonPresentInSentence(personName, responseFutures, strF, stanfordCoreNLP,
+                            stanfordCoreNLPSentiment)
+                }
+                yield()
+            }
+            launch1.join()
        }
        return responseFutures
    }
-    suspend fun getSingularAnnotation(str: String?) {
-        coroutineScope {
-            val job = launch(Dispatchers.Default) {
-                strAnno = Annotation(str)
-                strAnno!!.compact()
-                pipeline.annotate(strAnno)
-                yield()
-            }
-            job.join()
-            val job1 = launch(Dispatchers.Default) {
-                strAnnoSentiment = Annotation(str)
-                strAnnoSentiment!!.compact()
-                pipelineSentiment!!.annotate(strAnnoSentiment)
-                val notactualList: MutableList<String?> = arrayListOf()
-                notactualList.add(str)
-                val jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(notactualList)
-                strAnnoJMWE = jmweAnnotation.values.iterator().next()
-                strAnnoJMWE.compact()
-                yield()
-            }
-            job1.join()
-            val job3 = launch(Dispatchers.Default) {
-                val coreDocument = CoreDocument(str)
-                pipeline.annotate(coreDocument)
-                coreDoc = coreDocument
-                yield()
-            }
-            job3.join()
-        }
-    }
-    private fun getScoreRelationStrF(str: String?, mostRecentMsg: String?): Double {
-        val SMX = SimilarityMatrix(str, mostRecentMsg)
-        val cacheSentiment1 = sentimentCachingMap.getOrDefault(str, null)
-        val cacheSentiment2 = sentimentCachingMap.getOrDefault(mostRecentMsg, null)
-        val sentimentAnalyzerTest = SentimentAnalyzerTest(str, mostRecentMsg, SMX,
-                strAnnoJMWE, jmweAnnotationCache[mostRecentMsg], strAnno,
-                pipelineAnnotationCache[mostRecentMsg], strAnnoSentiment,
-                pipelineSentimentAnnotationCache[mostRecentMsg], coreDoc,
-                coreDocumentAnnotationCache[mostRecentMsg],
-                cacheSentiment1, cacheSentiment2)
-        val callSMX = sentimentAnalyzerTest.callSMX()
-        return callSMX.distance ?: 0.0
-    }
-    suspend private fun annotationCacheUpdate(strmap: ConcurrentMap<Int?, String?>): ConcurrentMap<Int?, String?> {
-        val jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strmap.values)
-        for ((key, value) in jmweAnnotation) {
-            jmweAnnotationCache[key] = value
-        }
-        val Annotationspipeline = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
-        val AnnotationspipelineSentiment = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
-        val coreDocumentpipelineMap = getMultipleCoreDocumentsWaySuggestion(strmap.values, pipeline)
-        coroutineScope {
-            val job = launch(Dispatchers.Default) {
-                for (str in strmap.values) {
-                    val strAnno1 = Annotation(str)
-                    Annotationspipeline[str] = strAnno1
-                    val strAnno2 = Annotation(str)
-                    AnnotationspipelineSentiment[str] = strAnno2
-                    stringCache[stringCache.size + 1] = str
-                }
-                yield()
-            }
-            pipeline.annotate(Annotationspipeline.values, 5)
-            pipelineSentiment!!.annotate(AnnotationspipelineSentiment.values, 5)
-            job.join()
-        }
-        coroutineScope {
-            val job = launch(Dispatchers.Default) {
-                for (pipelineEntry in Annotationspipeline.entries) {
-                    if (pipelineEntry != null) {
-                        pipelineAnnotationCache[pipelineEntry.key] = pipelineEntry.value
-                    }
-                }
-                yield()
-            }
-            job.join()
-        }
-        coroutineScope {
-            val job = launch(Dispatchers.Default) {
-                for (coreDocumentEntry in coreDocumentpipelineMap.entries) {
-                    coreDocumentAnnotationCache[coreDocumentEntry.key] = coreDocumentEntry.value
-                }
-                yield()
-            }
-            job.join()
-        }
-        coroutineScope {
-            val job1 = launch(Dispatchers.Default) {
-                for (pipelineEntry in AnnotationspipelineSentiment.entries) {
-                    if (pipelineEntry != null) {
-                        pipelineSentimentAnnotationCache[pipelineEntry.key] = pipelineEntry.value
-                    }
-                }
-                yield()
-            }
-            job1.join()
-        }
-        return strmap
-    }
-    private class AnnotationCollector<T> : Consumer<T> {
-        val annotationsT: MutableList<T?> = arrayListOf()
-        override fun accept(ann: T) {
-            //System.out.println("adding ann: " + ann.toString());
-            annotationsT.add(ann)
-        }
-        companion object {
-            var i = 0
-        }
-    }
-    companion object {
-        val EXPIRE_TIME_IN_SECONDS = TimeUnit.SECONDS.convert(10, TimeUnit.MINUTES)
-        val EXPIRE_TIME_IN_SECONDS1 = TimeUnit.SECONDS.convert(10, TimeUnit.HOURS)
-        @JvmField
-        var instance = Datahandler()
-        private var strAnno: Annotation? = null
-        private var strAnnoSentiment: Annotation? = null
-        private lateinit var strAnnoJMWE: Annotation
-        private var coreDoc: CoreDocument? = null
-        private val stringCache = MapMaker().concurrencyLevel(6).makeMap<Int, String?>()
-        private lateinit var pipelineAnnotationCache: ConcurrentMap<String?, Annotation>
-        private lateinit var pipelineSentimentAnnotationCache: ConcurrentMap<String?, Annotation>
-        private lateinit var jmweAnnotationCache: ConcurrentMap<String, Annotation>
-        private lateinit var coreDocumentAnnotationCache: ConcurrentMap<String, CoreDocument>
-        private val sentimentCachingMap = MapMaker().concurrencyLevel(6).makeMap<String, SentimentValueCache>()
-        private const val similar = ""
-        private const val shiftReduceParserPath = "edu/stanford/nlp/models/srparser/englishSR.ser.gz"
-        private const val sentimentModel = "edu/stanford/nlp/models/sentiment/sentiment.ser.gz"
-        private const val lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz"
-        private const val taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger"
-        private const val nerModel = "edu/stanford/nlp/models/ner/english.all.3class.caseless.distsim.crf.ser.gz"
-        private const val nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.caseless.distsim.crf.ser.gz"
-        private const val nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.caseless.distsim.crf.ser.gz"
-        private const val customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for,if,in,into,is,it,no,not,of,on,or,such,that,the,their,then,there,these,they,this,to,was,will,with"
-        @JvmStatic
-        var tagger: MaxentTagger? = null
-            private set
-        private val options = arrayOf("-maxLength", "100")
-        private val props = Properties()
-        private val propsSentiment = Properties()
-        @JvmStatic
-        var gsf: GrammaticalStructureFactory? = null
-            private set
-        private lateinit var lp: LexicalizedParser
-        private lateinit var tlp: TreebankLanguagePack
-        private lateinit var classifier: AbstractSequenceClassifier<CoreLabel>
-        public fun getPipeLine(): StanfordCoreNLP {
-            return pipeline
-        }
-        // set up Stanford CoreNLP pipeline
-        @JvmStatic
-        val pipeline = pipeLineSetUp
-        private var pipelineSentiment: StanfordCoreNLP? = null
-        private val pipeLineSetUp: StanfordCoreNLP
-            private get() {
-                props.setProperty("annotators", "tokenize,ssplit,pos,lemma,ner,parse")
-                props.setProperty("parse.model", shiftReduceParserPath)
-                props.setProperty("parse.maxlen", "90")
-                props.setProperty("parse.binaryTrees", "true")
-                props.setProperty("threads", "5")
-                props.setProperty("pos.maxlen", "90")
-                props.setProperty("tokenize.maxlen", "90")
-                props.setProperty("ssplit.maxlen", "90")
-                props.setProperty("lemma.maxlen", "90")
-                props.setProperty("ner.model", "$nerModel,$nerModel2,$nerModel3")
-                props.setProperty("ner.combinationMode", "HIGH_RECALL")
-                props.setProperty("regexner.ignorecase", "true")
-                props.setProperty("ner.fine.regexner.ignorecase", "true")
-                props.setProperty("tokenize.options", "untokenizable=firstKeep")
-                return StanfordCoreNLP(props)
-            }
-        @JvmStatic
-        fun getClassifier(): AbstractSequenceClassifier<CoreLabel> {
-            return classifier
-        }
-        private fun getMultipleJMWEAnnotation(str: Collection<String?>): ConcurrentMap<String, Annotation> {
-            return PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(str)
-        }
-        private fun getMultiplePipelineAnnotation(str: Collection<String?>): ConcurrentMap<String?, Annotation> {
-            val pipelineAnnotationMap = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
-            for (str1 in str) {
-                val strAnno1 = Annotation(str1)
-                pipelineAnnotationMap[str1] = strAnno1
-            }
-            pipeline.annotate(pipelineAnnotationMap.values, 5)
-            return pipelineAnnotationMap
-        }
-        private fun getMultiplePipelineSentimentAnnotation(str: Collection<String?>): ConcurrentMap<String?, Annotation> {
-            val pipelineAnnotationMap = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
-            for (str1 in str) {
-                val strAnno1 = Annotation(str1)
-                pipelineAnnotationMap[str1] = strAnno1
-            }
-            pipelineSentiment?.annotate(pipelineAnnotationMap.values, 5)
-            return pipelineAnnotationMap
-        }
-        fun filterContent(str: ConcurrentMap<Int?, String?>): ConcurrentMap<Int?, String?> {
-            val strlistreturn = MapMaker().concurrencyLevel(5).makeMap<Int?, String?>()
-            for (str1: String? in str.values) {
-                if (!str1?.isEmpty()!! && str1.length > 3) {
-                    var str1Local: String = str1.trim();
-                    if (str1Local.length > 2 && !str1Local.startsWith("!")) {
-                        strlistreturn[strlistreturn.size] = str1Local
-                    }
-                }
-            }
-            return strlistreturn
-        }
-        suspend fun getMultipleCoreDocumentsWaySuggestion(str: Collection<String?>, localNLP: StanfordCoreNLP): ConcurrentMap<String, CoreDocument> {
-            val annCollector: AnnotationCollector<Annotation?> = AnnotationCollector<Annotation?>()
-            val annotationreturnMap = MapMaker().concurrencyLevel(6).makeMap<String, CoreDocument>()
-            coroutineScope {
-                val job = launch(Dispatchers.Default) {
-                    for (exampleString in str) {
-                        localNLP.annotate(Annotation(exampleString), annCollector)
-                        AnnotationCollector.i++
-                    }
-                    yield()
-                }
-                job.join()
-            }
-            try {
-                Thread.sleep(1500)
-            } catch (ex: InterruptedException) {
-                Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
-            }
-            coroutineScope {
-                val job1 = launch(Dispatchers.Default) {
-                    for (ann in annCollector.annotationsT) {
-                        if (ann != null) {
-                            ann.compact()
-                            val CD = CoreDocument(ann)
-                            annotationreturnMap[CD.text()] = CD
-                        }
-                    }
-                    yield()
-                }
-                job1.join()
-            }
-            try {
-                Thread.sleep(1500)
-            } catch (ex: InterruptedException) {
-                Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
-            }
-            return annotationreturnMap
-        }
-    }
-    init {
-        stopwatch = Stopwatch.createUnstarted()
-        jmweAnnotationCache = MapMaker().concurrencyLevel(5).makeMap<String, Annotation>()
-        pipelineAnnotationCache = MapMaker().concurrencyLevel(5).makeMap<String, Annotation>()
-        pipelineSentimentAnnotationCache = MapMaker().concurrencyLevel(5).makeMap<String, Annotation>()
-        coreDocumentAnnotationCache = MapMaker().concurrencyLevel(5).makeMap<String, CoreDocument>()
-    }
+    private fun checkPersonPresentInSentence(personName: String, responseMsg: String, userLastMessage: String,
+                                             stanfordCoreNLP: StanfordCoreNLP,
+                                             stanfordCoreNLPSentiment: StanfordCoreNLP): String {
+        try {
+            val pipelineCoreDcoument = CoreDocument(responseMsg)
+            val pipelineCoreDcoumentLastMsg = CoreDocument(userLastMessage)
+            stanfordCoreNLP.annotate(pipelineCoreDcoument)
+            stanfordCoreNLPSentiment.annotate(pipelineCoreDcoumentLastMsg)
+            val regex = "(.*?\\d){10,}"
+            for (em in pipelineCoreDcoument.entityMentions()) {
+                val entityType = em.entityType()
+                if (entityType == "PERSON") {
+                    var str = responseMsg
+                    val emText = em.text()
+                    val pattern = Pattern.compile(regex)
+                    val matcher = pattern.matcher(personName)
+                    val isMatched = matcher.matches()
+                    if (emText != personName && !isMatched) {
+                        for (emLastMsg in pipelineCoreDcoumentLastMsg.entityMentions()) {
+                            if (emText != emLastMsg.text() && !Character.isDigit(emLastMsg.text().trim { it <= ' ' }[0])) {
+                                //System.out.println("emLastMsg.text(): " + emLastMsg.text());
+                                str = (responseMsg.substring(0, responseMsg.indexOf(emText)) + " "
+                                        + emLastMsg + " " + responseMsg.substring(responseMsg.indexOf(emText)))
+                            }
+                        }
+                        str += " $personName"
+                        return str
+                    }
+                }
+            }
+        } catch (e: Exception) {
+            println("""SCUFFED JAYZ: ${e.localizedMessage}""".trimIndent())
+        }
+        return responseMsg
+    }
+    fun filterContent(str: String): Boolean {
+        if (!str.isEmpty() && str.length > 3) {
+            var str1Local: String = str.trim();
+            if (str1Local.length > 2 && !str1Local.startsWith("!")) {
+                return true
+            }
+        }
+        return false
+    }
+    fun getCoreDocumentsSuggested(pipeline: StanfordCoreNLP, str: String) {
+        val annotation = Annotation(str)
+        pipeline.annotate(annotation)
+        val coreDocument = CoreDocument(annotation)
+        coreDocumentAnnotationCache.put(str, coreDocument)
+    }
}
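Taken together, the refactored Datahandler expects the JMWE singleton and both pipelines to exist before the first message is answered. A hedged end-to-end sketch, with the message text and person name as placeholder inputs:

    // Assumed call order, not shown in this commit.
    PipelineJMWESingleton.getINSTANCE()
    val datahandler = Datahandler()
    datahandler.initiateMYSQL()
    val nlp = datahandler.pipeLineSetUp()
    val nlpSentiment = datahandler.shiftReduceParserInitiate()
    val reply = datahandler.getResponseMsg("hello there", "Alice", nlp, nlpSentiment, false)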

View File: FunctionLayer/DoStuff.java

@@ -16,6 +16,7 @@ import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
+import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@@ -25,30 +26,26 @@ import reactor.core.publisher.Mono;
 */
public class DoStuff {
-    public static boolean occupied = false;
-    public static boolean isOccupied() {
-        return occupied;
-    }
-    public static void doStuff(MessageCreateEvent event, String usernameBot) {
-        String username = null;
+    public static void doStuff(MessageCreateEvent event, String usernameBot, Datahandler datahandler,
+                               StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) {
+        String username = "";
        try {
            username = event.getMessage().getAuthor().get().getUsername();
        } catch (java.util.NoSuchElementException e) {
            username = null;
        }
        if (username != null && !username.equals(usernameBot)) {
-            occupied = true;
            TextChannel block = event.getMessage().getChannel().cast(TextChannel.class).block();
            String name = block.getCategory().block().getName();
            name = name.toLowerCase();
            String channelName = block.getName().toLowerCase();
            boolean channelpermissionsDenied = false;
            if (channelName.contains("suggestion-box")) {
                channelpermissionsDenied = true;
            }
            switch (name) {
-                case "public area": {
-                    break;
-                }
+                case "public area":
                case "information area": {
                    break;
                }
@@ -57,37 +54,36 @@ public class DoStuff {
                    break;
                }
            }
-            List<User> blockLast = event.getMessage().getUserMentions().buffer().blockLast();
-            String content = event.getMessage().getContent();
            if (!channelpermissionsDenied) {
+                List<User> blockLast = event.getMessage().getUserMentions().buffer().blockLast();
+                String content = event.getMessage().getContent();
                if (blockLast != null) {
                    for (User user : blockLast) {
                        content = content.replace(user.getId().asString(), "");
                    }
                }
-                MessageResponseHandler.getMessage(content);
-            }
-            boolean mentionedBot = false;
-            if (blockLast != null) {
-                for (User user : blockLast) {
-                    if (user.getUsername().equals(usernameBot)) {
-                        mentionedBot = true;
-                        break;
-                    }
-                }
-            }
-            if (mentionedBot || channelName.contains("general-autism")) {
-                String ResponseStr;
-                ResponseStr = MessageResponseHandler.selectReponseMessage(content, username);
-                if (!ResponseStr.isEmpty()) {
-                    System.out.print("\nResponseStr3: " + ResponseStr + "\n");
-                    event.getMessage().getChannel().block().createMessage(ResponseStr).block();
-                }
-            }
+                boolean mentionedBot = false;
+                if (blockLast != null) {
+                    for (User user : blockLast) {
+                        if (user.getUsername().equals(usernameBot)) {
+                            mentionedBot = true;
+                            break;
+                        }
+                    }
+                }
+                if (mentionedBot || channelName.contains("general-autism")) {
+                    String ResponseStr;
+                    ResponseStr = datahandler.getResponseMsg(content, username, stanfordCoreNLP, stanfordCoreNLPSentiment,
+                            false);
+                    if (!ResponseStr.isEmpty()) {
+                        System.out.print("\nResponseStr3: " + ResponseStr + "\n");
+                        event.getMessage().getChannel().block().createMessage(ResponseStr).block();
+                    }
+                }
+            }
            new Thread(() -> {
-                Datahandler.instance.checkIfUpdateStrings();
+                datahandler.updateStringCache();
            }).start();
-            occupied = false;
        }
    }
}
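doStuff() now receives the Datahandler and both pipelines as arguments instead of reaching into Datahandler.instance, and the static occupied flag is gone, so handlers no longer share mutable static state. A hypothetical Discord4J subscription that supplies them (the client variable and its setup are assumptions, not part of this commit):

    // Hypothetical wiring: forward each MessageCreateEvent with the shared
    // handler and pipelines built at start-up (Discord4J 3.x style).
    client.getEventDispatcher().on(MessageCreateEvent::class.java)
        .subscribe { event ->
            DoStuff.doStuff(event, usernameBot, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment)
        }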

View File: FunctionLayer/MessageResponseHandler.java (deleted)

@@ -1,99 +0,0 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package FunctionLayer;
import com.google.common.collect.MapMaker;
import edu.stanford.nlp.pipeline.CoreDocument;
import edu.stanford.nlp.pipeline.CoreEntityMention;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author install1
*/
public class MessageResponseHandler {
private static ConcurrentMap<Integer, String> str = new MapMaker().concurrencyLevel(6).makeMap();
public static ConcurrentMap<Integer, String> getStr() {
ArrayList<String> arrayList = new ArrayList(str.values());
Collections.sort(arrayList, (o1, o2) -> o2.length() - o1.length());
int iterator = 0;
for (String str1 : arrayList) {
str.put(iterator, str1);
iterator++;
}
return str;
}
public static void setStr(ConcurrentMap<Integer, String> str) {
MessageResponseHandler.str = str;
}
public static void getMessage(String message) {
if (message != null && !message.isEmpty()) {
message = message.replace("@", "");
if (message.contains("<>")) {
message = message.substring(message.indexOf(">"));
}
if (message.startsWith("[ *")) {
message = message.substring(message.indexOf("]"));
}
str.put(str.size() + 1, message);
}
}
public static String selectReponseMessage(String toString, String personName) {
String getResponseMsg = Datahandler.instance.getResponseMsg(toString);
getResponseMsg = checkPersonPresentInSentence(personName, getResponseMsg, toString);
return getResponseMsg;
}
private static String checkPersonPresentInSentence(String personName, String responseMsg, String userLastMessage) {
//check if userlastmsg contains person as refference
//check if first person is author or their person of mention
try {
String strreturn = responseMsg;
CoreDocument pipelineCoreDcoument = new CoreDocument(responseMsg);
CoreDocument pipelineCoreDcoumentLastMsg = new CoreDocument(userLastMessage);
Datahandler.getPipeline().annotate(pipelineCoreDcoument);
Datahandler.getPipeline().annotate(pipelineCoreDcoumentLastMsg);
//Datahandler.pipeline.annotate(pipelineCoreDcoument);
//Datahandler.pipeline.annotate(pipelineCoreDcoumentLastMsg);
String regex = "(.*?\\d){10,}";
for (CoreEntityMention em : pipelineCoreDcoument.entityMentions()) {
String entityType = em.entityType();
if (entityType.equals("PERSON")) {
String str = strreturn;
String emText = em.text();
Pattern pattern = Pattern.compile(regex);
Matcher matcher = pattern.matcher(personName);
boolean isMatched = matcher.matches();
if (!emText.equals(personName) && !isMatched) {
for (CoreEntityMention emLastMsg : pipelineCoreDcoumentLastMsg.entityMentions()) {
if (!emText.equals(emLastMsg.text()) && !Character.isDigit(emLastMsg.text().trim().charAt(0))) {
//System.out.println("emLastMsg.text(): " + emLastMsg.text());
str = strreturn.substring(0, strreturn.indexOf(emText)) + " "
+ emLastMsg + " " + strreturn.substring(strreturn.indexOf(emText));
}
}
str += " " + personName;
return str;
}
}
}
} catch (Exception e) {
System.out.println("SCUFFED JAYZ: " + e.getLocalizedMessage() + "\n");
}
return responseMsg;
}
}

View File: FunctionLayer/PipelineJMWESingleton.java

@@ -5,7 +5,6 @@
 */
package FunctionLayer;
-import com.google.common.collect.MapMaker;
import edu.mit.jmwe.data.IMWE;
import edu.mit.jmwe.data.IToken;
import edu.mit.jmwe.data.Token;
@@ -28,10 +27,8 @@ import edu.stanford.nlp.util.CoreMap;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.List;
import java.util.Properties;
-import java.util.concurrent.ConcurrentMap;
/**
 * @author install1
@@ -40,20 +37,14 @@ import java.util.concurrent.ConcurrentMap;
public class PipelineJMWESingleton {
    //if not needed to be volatile dont make it, increases time
-    public volatile static PipelineJMWESingleton INSTANCE;
+    //public volatile static PipelineJMWESingleton INSTANCE;
+    public static PipelineJMWESingleton INSTANCE;
    private static StanfordCoreNLP localNLP = initializeJMWE();
    private static String underscoreSpaceReplacement;
+    private static IMWEIndex index;
+    private static IMWEDetector detector;
    private PipelineJMWESingleton() {
-    }
-    public static void getINSTANCE() {
-        INSTANCE = new PipelineJMWESingleton();
-    }
-    public final ConcurrentMap<String, Annotation> getJMWEAnnotation(Collection<String> strvalues) {
-        boolean verbose = false;
-        IMWEIndex index;
        String jmweIndexData = "/home/gameservers/autism_bot/lib/mweindex_wordnet3.0_semcor1.6.data"; // ./lib/mweindex_wordnet3.0_semcor1.6.data
        String jmweIndexDataLocalTest = "E:/java8/Projects/mweindex_wordnet3.0_semcor1.6.data";
        File indexFile = new File((String) jmweIndexData);
@@ -64,22 +55,30 @@
        } catch (IOException e) {
            throw new RuntimeException("unable to open IMWEIndex index: " + e + "\n");
        }
-        IMWEDetector detector = getDetector(index, detectorName);
-        ConcurrentMap<String, Annotation> returnAnnotations = new MapMaker().concurrencyLevel(5).makeMap();
-        strvalues.forEach(str -> {
-            Annotation annoStr = new Annotation(str);
-            returnAnnotations.put(str, annoStr);
-        });
-        localNLP.annotate(returnAnnotations.values(), 4);
-        returnAnnotations.values().parallelStream().forEach(annoStr -> {
-            for (CoreMap sentence : annoStr.get(CoreAnnotations.SentencesAnnotation.class)) {
-                List<IMWE<IToken>> mwes = getjMWEInSentence(sentence, index, detector, verbose);
-                sentence.set(JMWEAnnotation.class, mwes);
-            }
-        });
+        detector = getDetector(index, detectorName);
        index.close();
-        return returnAnnotations;
    }
+    public static void getINSTANCE() {
+        INSTANCE = new PipelineJMWESingleton();
+    }
+    public final Annotation getJMWEAnnotation(String str) {
+        try {
+            index.open();
+        } catch (IOException e) {
+            throw new RuntimeException("unable to open IMWEIndex index: " + e + "\n");
+        }
+        Annotation annoStr = new Annotation(str);
+        localNLP.annotate(annoStr);
+        Class<CoreAnnotations.SentencesAnnotation> sentencesAnnotationClass = CoreAnnotations.SentencesAnnotation.class;
+        for (CoreMap sentence : annoStr.get(sentencesAnnotationClass)) {
+            List<IMWE<IToken>> mwes = getjMWEInSentence(sentence, index, detector, false);
+            //annoStr.set(JMWEAnnotation.class, mwes);
+            sentence.set(JMWEAnnotation.class, mwes);
+        }
+        index.close();
+        return annoStr;
+    }
    public final static StanfordCoreNLP initializeJMWE() {
@@ -94,7 +93,7 @@
        propsJMWE.setProperty("lemma.maxlen", "90");
        underscoreSpaceReplacement = "-";
        localNLP = new StanfordCoreNLP(propsJMWE);
-        System.out.println("finished singleton constructor \n");
+        System.out.println("finished JMWE constructor \n");
        return localNLP;
    }
@@ -147,5 +146,4 @@
        }
        return sentence;
    }
}
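getJMWEAnnotation() now annotates one string at a time and reuses the detector built once in the private constructor, reopening the shared IMWEIndex per call, so getINSTANCE() must run before the first lookup. A brief usage sketch; the input string is a placeholder:

    // Call order implied by the refactor.
    PipelineJMWESingleton.getINSTANCE()
    val anno = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation("ran out of time")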

View File: FunctionLayer/SimilarityMatrix.java

@@ -5,10 +5,7 @@
 */
package FunctionLayer;
-import FunctionLayer.StanfordParser.SentimentValueCache;
/**
- *
 * @author install1
 */
public class SimilarityMatrix {
@@ -16,8 +13,6 @@ public class SimilarityMatrix {
    private String PrimaryString;
    private String SecondaryString;
    private double distance;
-    private SentimentValueCache cacheValue1;
-    private SentimentValueCache cacheValue2;
    public final double getDistance() {
        return distance;
@@ -38,36 +33,8 @@
        this.distance = result;
    }
-    public final String getPrimaryString() {
-        return PrimaryString;
-    }
-    public final void setPrimaryString(String PrimaryString) {
-        this.PrimaryString = PrimaryString;
-    }
    public final String getSecondaryString() {
        return SecondaryString;
    }
-    public final void setSecondaryString(String SecondaryString) {
-        this.SecondaryString = SecondaryString;
-    }
-    public final SentimentValueCache getCacheValue1() {
-        return cacheValue1;
-    }
-    public final void setCacheValue1(SentimentValueCache cacheValue1) {
-        this.cacheValue1 = cacheValue1;
-    }
-    public final SentimentValueCache getCacheValue2() {
-        return cacheValue2;
-    }
-    public final void setCacheValue2(SentimentValueCache cacheValue2) {
-        this.cacheValue2 = cacheValue2;
-    }
}

View File: FunctionLayer/StanfordParser/SentimentValueCache.java (deleted)

@@ -1,334 +0,0 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package FunctionLayer.StanfordParser;
import com.google.common.collect.MapMaker;
import edu.stanford.nlp.ling.TaggedWord;
import edu.stanford.nlp.trees.GrammaticalStructure;
import edu.stanford.nlp.trees.Tree;
import edu.stanford.nlp.trees.TypedDependency;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import org.ejml.simple.SimpleMatrix;
/**
*
* @author install1
*/
public class SentimentValueCache {
private String sentence;
private int counter;
private List<List<TaggedWord>> taggedwordlist = new ArrayList();
private final ConcurrentMap<Integer, String> tgwlistIndex = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, Tree> sentenceConstituencyParseList = new MapMaker().concurrencyLevel(2).makeMap();
private final Collection<TypedDependency> allTypedDependencies = new ArrayList();
private final ConcurrentMap<Integer, GrammaticalStructure> gsMap = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist = new MapMaker().concurrencyLevel(3).makeMap();
private final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector = new MapMaker().concurrencyLevel(3).makeMap();
private final ConcurrentMap<Integer, Integer> rnnPredictClassMap = new MapMaker().concurrencyLevel(3).makeMap();
private List classifyRaw;
private int mainSentiment = 0;
private int longest = 0;
private int tokensCounter = 0;
private int anotatorcounter = 0;
private int inflectedCounterPositive = 0;
private int inflectedCounterNegative = 0;
private int MarkedContinuousCounter = 0;
private int MarkedContiniousCounterEntries = 0;
private int UnmarkedPatternCounter = 0;
private int pairCounter = 0;
private final ConcurrentMap<Integer, String> ITokenMapTag = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> strTokenStems = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> strTokenForm = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> strTokenGetEntry = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> strTokenGetiPart = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> strTokenEntryPOS = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, Integer> entryCounts = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> nerEntities1 = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> nerEntities2 = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> nerEntityTokenTags = new MapMaker().concurrencyLevel(3).makeMap();
private final ConcurrentMap<Integer, String> stopwordTokens = new MapMaker().concurrencyLevel(2).makeMap();
private final ConcurrentMap<Integer, String> stopWordLemma = new MapMaker().concurrencyLevel(2).makeMap();
public int getPairCounter() {
return pairCounter;
}
public void setPairCounter(int pairCounter) {
this.pairCounter = pairCounter;
}
public void addStopWordLemma(String str) {
stopWordLemma.put(stopWordLemma.size(), str);
}
public void addstopwordTokens(String str) {
stopwordTokens.put(stopwordTokens.size(), str);
}
public ConcurrentMap<Integer, String> getStopwordTokens() {
return stopwordTokens;
}
public ConcurrentMap<Integer, String> getStopWordLemma() {
return stopWordLemma;
}
public void addnerEntityTokenTags(String str) {
nerEntityTokenTags.put(nerEntityTokenTags.size(), str);
}
public ConcurrentMap<Integer, String> getnerEntityTokenTags() {
return nerEntityTokenTags;
}
public ConcurrentMap<Integer, String> getnerEntities1() {
return nerEntities1;
}
public ConcurrentMap<Integer, String> getnerEntities2() {
return nerEntities2;
}
public void addNEREntities1(String str) {
nerEntities1.put(nerEntities1.size(), str);
}
public void addNEREntities2(String str) {
nerEntities2.put(nerEntities2.size(), str);
}
public void setTaggedwords(List<List<TaggedWord>> twlist) {
taggedwordlist = twlist;
}
public List<List<TaggedWord>> getTaggedwordlist() {
return taggedwordlist;
}
public void addEntryCounts(int counts) {
entryCounts.put(entryCounts.size(), counts);
}
public ConcurrentMap<Integer, Integer> getEntryCounts() {
return entryCounts;
}
public void addstrTokenEntryPOS(String str) {
strTokenEntryPOS.put(strTokenEntryPOS.size(), str);
}
public ConcurrentMap<Integer, String> getstrTokenEntryPOS() {
return strTokenEntryPOS;
}
public void addstrTokenGetiPart(String str) {
strTokenGetiPart.put(strTokenGetiPart.size(), str);
}
public ConcurrentMap<Integer, String> getstrTokenGetiPart() {
return strTokenGetiPart;
}
public ConcurrentMap<Integer, String> getstrTokenGetEntry() {
return strTokenGetEntry;
}
public void addstrTokenGetEntry(String str) {
strTokenGetEntry.put(strTokenGetEntry.size(), str);
}
public ConcurrentMap<Integer, String> getstrTokenForm() {
return strTokenForm;
}
public void addstrTokenForm(String str) {
strTokenForm.put(strTokenForm.size(), str);
}
public ConcurrentMap<Integer, String> getstrTokenStems() {
return strTokenStems;
}
public void addstrTokenStems(String str) {
strTokenStems.put(strTokenStems.size(), str);
}
public ConcurrentMap<Integer, String> getITokenMapTag() {
return ITokenMapTag;
}
public void addITokenMapTag(String str) {
ITokenMapTag.put(ITokenMapTag.size(), str);
}
public int getUnmarkedPatternCounter() {
return UnmarkedPatternCounter;
}
public void setUnmarkedPatternCounter(int UnmarkedPatternCounter) {
this.UnmarkedPatternCounter = UnmarkedPatternCounter;
}
public int getMarkedContiniousCounterEntries() {
return MarkedContiniousCounterEntries;
}
public void setMarkedContiniousCounterEntries(int MarkedContiniousCounterEntries) {
this.MarkedContiniousCounterEntries = MarkedContiniousCounterEntries;
}
public int getMarkedContinuousCounter() {
return MarkedContinuousCounter;
}
public void setMarkedContinuousCounter(int MarkedContinuousCounter) {
this.MarkedContinuousCounter = MarkedContinuousCounter;
}
public int getInflectedCounterNegative() {
return inflectedCounterNegative;
}
public void setInflectedCounterNegative(int inflectedCounterNegative) {
this.inflectedCounterNegative = inflectedCounterNegative;
}
public int getInflectedCounterPositive() {
return inflectedCounterPositive;
}
public void setInflectedCounterPositive(int inflectedCounterPositive) {
this.inflectedCounterPositive = inflectedCounterPositive;
}
public int getAnotatorcounter() {
return anotatorcounter;
}
public void setAnotatorcounter(int anotatorcounter) {
this.anotatorcounter = anotatorcounter;
}
public int getTokensCounter() {
return tokensCounter;
}
public void setTokensCounter(int tokensCounter) {
this.tokensCounter = tokensCounter;
}
public int getMainSentiment() {
return mainSentiment;
}
public void setMainSentiment(int mainSentiment) {
this.mainSentiment = mainSentiment;
}
public int getLongest() {
return longest;
}
public void setLongest(int longest) {
this.longest = longest;
}
public List getClassifyRaw() {
return classifyRaw;
}
public void setClassifyRaw(List classifyRaw) {
this.classifyRaw = classifyRaw;
}
public ConcurrentMap<Integer, Integer> getRnnPrediectClassMap() {
return rnnPredictClassMap;
}
public void addRNNPredictClass(int rnnPrediction) {
rnnPredictClassMap.put(rnnPredictClassMap.size(), rnnPrediction);
}
public void addSimpleMatrix(SimpleMatrix SMX) {
simpleSMXlist.put(simpleSMXlist.size(), SMX);
}
public void addSimpleMatrixVector(SimpleMatrix SMX) {
simpleSMXlistVector.put(simpleSMXlistVector.size(), SMX);
}
public ConcurrentMap<Integer, GrammaticalStructure> getGsMap() {
return gsMap;
}
public ConcurrentMap<Integer, SimpleMatrix> getSimpleSMXlist() {
return simpleSMXlist;
}
public ConcurrentMap<Integer, SimpleMatrix> getSimpleSMXlistVector() {
return simpleSMXlistVector;
}
public ConcurrentMap<Integer, GrammaticalStructure> getGs() {
return gsMap;
}
public int getCounter() {
return counter;
}
public void addGS(GrammaticalStructure gs) {
gsMap.put(gsMap.size(), gs);
}
public Collection<TypedDependency> getAllTypedDependencies() {
return allTypedDependencies;
}
public void addTypedDependencies(Collection<TypedDependency> TDPlist) {
for (TypedDependency TDP : TDPlist) {
allTypedDependencies.add(TDP);
}
}
public ConcurrentMap<Integer, Tree> getSentenceConstituencyParseList() {
return sentenceConstituencyParseList;
}
public void addSentenceConstituencyParse(Tree tree) {
sentenceConstituencyParseList.put(sentenceConstituencyParseList.size(), tree);
}
public void setCounter(int counter) {
counter = counter;
}
public String getSentence() {
return sentence;
}
public SentimentValueCache(String str, int counter) {
this.sentence = str;
this.counter = counter;
}
public ConcurrentMap<Integer, String> getTgwlistIndex() {
return tgwlistIndex;
}
public void addTgwlistIndex(String str) {
tgwlistIndex.put(tgwlistIndex.size(), str);
}
public SentimentValueCache(String str) {
this.sentence = str;
}
}
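The whole hand-rolled per-sentence cache is deleted with nothing replacing it yet; the commit message notes that caching for sentimentAnalyzerTest is still missing. If it is reintroduced, it likely does not need 300 lines of integer-keyed ConcurrentMaps. A minimal thread-safe memo over the pipeline could look like the sketch below; the class name and the Annotation key/value choice are assumptions, not part of this commit:

import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class AnnotationCache {
    private final ConcurrentMap<String, Annotation> cache = new ConcurrentHashMap<>();
    private final StanfordCoreNLP pipeline;

    public AnnotationCache(StanfordCoreNLP pipeline) {
        this.pipeline = pipeline;
    }

    // computeIfAbsent annotates a sentence only the first time it is seen;
    // repeated lookups for the same string return the cached Annotation.
    public Annotation get(String sentence) {
        return cache.computeIfAbsent(sentence, s -> {
            Annotation ann = new Annotation(s);
            pipeline.annotate(ann);
            return ann;
        });
    }
}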

View File

@ -15,21 +15,18 @@ screen -X -S (number1) quit
package PresentationLayer; package PresentationLayer;
import DataLayer.settings; import DataLayer.settings;
import FunctionLayer.CustomError;
import FunctionLayer.Datahandler; import FunctionLayer.Datahandler;
import FunctionLayer.PipelineJMWESingleton; import FunctionLayer.PipelineJMWESingleton;
import com.sun.tools.javac.util.List; import com.sun.tools.javac.util.List;
import discord4j.core.DiscordClient; import discord4j.core.DiscordClient;
import discord4j.core.GatewayDiscordClient; import discord4j.core.GatewayDiscordClient;
import discord4j.core.event.domain.message.MessageCreateEvent; import discord4j.core.event.domain.message.MessageCreateEvent;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import java.io.IOException; import java.io.IOException;
import java.io.UnsupportedEncodingException; import java.io.UnsupportedEncodingException;
import java.net.*; import java.net.*;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
/** /**
@ -37,7 +34,8 @@ import java.util.logging.Logger;
*/ */
public class DiscordHandler { public class DiscordHandler {
private static void receiveAndSendPacket(DatagramSocket serverSocket, InetAddress ipAddress, int port) throws IOException, CustomError { private static void receiveAndSendPacket(DatagramSocket serverSocket, InetAddress ipAddress, int port,
Datahandler datahandler, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) throws IOException {
byte[] receiveData = new byte[4096]; byte[] receiveData = new byte[4096];
DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length); DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
try { try {
@ -48,8 +46,9 @@ public class DiscordHandler {
String sentence = new String(receivePacket.getData(), 0, String sentence = new String(receivePacket.getData(), 0,
receivePacket.getLength()); receivePacket.getLength());
sentence = sentence.replace("clientmessage:", ""); sentence = sentence.replace("clientmessage:", "");
String getResponseMsg = Datahandler.instance.getResponseMsg(sentence); String ResponseMsg = datahandler.getResponseMsg(sentence, "", stanfordCoreNLP, stanfordCoreNLPSentiment,
byte[] sendData = getResponseMsg.getBytes("UTF-8"); true);
byte[] sendData = ResponseMsg.getBytes("UTF-8");
int deliver_port = 0; int deliver_port = 0;
switch (port) { switch (port) {
case 48470: case 48470:
@ -67,10 +66,10 @@ public class DiscordHandler {
} }
DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, ipAddress, deliver_port); DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, ipAddress, deliver_port);
serverSocket.send(sendPacket); serverSocket.send(sendPacket);
//System.out.println("receiveAndSendPacket send message to port: " + deliver_port);
} }
public static void handleUDPTraffic(int port) { public static void handleUDPTraffic(int port, Datahandler datahandler,
StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) {
try (DatagramSocket serverSocket = new DatagramSocket(port)) { try (DatagramSocket serverSocket = new DatagramSocket(port)) {
String hostIP = ""; String hostIP = "";
if (port == 48473 || port == 48471) { if (port == 48473 || port == 48471) {
@ -80,35 +79,27 @@ public class DiscordHandler {
} }
InetAddress ipAddress = InetAddress.getByName(hostIP);//used ip' InetAddress ipAddress = InetAddress.getByName(hostIP);//used ip'
while (true) { while (true) {
receiveAndSendPacket(serverSocket, ipAddress, port); receiveAndSendPacket(serverSocket, ipAddress, port, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
} }
} catch (SocketException | UnknownHostException e) { } catch (SocketException | UnknownHostException e) {
e.printStackTrace(); e.printStackTrace();
} catch (UnsupportedEncodingException e) { } catch (UnsupportedEncodingException e) {
e.printStackTrace(); e.printStackTrace();
} catch (CustomError customError) {
customError.printStackTrace();
} catch (IOException e) { } catch (IOException e) {
e.printStackTrace(); e.printStackTrace();
} }
} }
public static void main(String[] args) { public static void main(String[] args) throws IOException, SQLException {
try { Datahandler datahandler = new Datahandler();
Datahandler.instance.initiateMYSQL(); datahandler.initiateMYSQL();
//nohup screen -d -m -S nonroot java -Xmx6900M -jar /home/javatests/ArtificialAutism-1.0.jar
//uncomment db fetch when ready, just keep the comment for future reference
System.out.println("finished initiating MYSQL");
} catch (SQLException | IOException ex) {
Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);
}
PipelineJMWESingleton.getINSTANCE(); PipelineJMWESingleton.getINSTANCE();
Datahandler.instance.instantiateAnnotationMapJMWE(); StanfordCoreNLP stanfordCoreNLP = datahandler.pipeLineSetUp();
Datahandler.instance.shiftReduceParserInitiate(); StanfordCoreNLP stanfordCoreNLPSentiment = datahandler.shiftReduceParserInitiate();
Datahandler.instance.instantiateAnnotationMap();
System.out.println("FINISHED ALL ANNOTATIONS"); System.out.println("FINISHED ALL ANNOTATIONS");
Datahandler.instance.updateStringCache(); datahandler.updateStringCache();
//String token = "NTI5NzAxNTk5NjAyMjc4NDAx.Dw0vDg.7-aMjVWdQMYPl8qVNyvTCPS5F_A"; System.out.println("updatedstring cache");
String token = new settings().getDiscordToken(); String token = new settings().getDiscordToken();
final DiscordClient client = DiscordClient.create(token); final DiscordClient client = DiscordClient.create(token);
final GatewayDiscordClient gateway = client.login().block(); final GatewayDiscordClient gateway = client.login().block();
@ -119,13 +110,11 @@ public class DiscordHandler {
final int j = i; final int j = i;
new Thread(() -> { new Thread(() -> {
List<Integer> ports = List.of(48470, 48471, 48472, 48473); List<Integer> ports = List.of(48470, 48471, 48472, 48473);
handleUDPTraffic(ports.get(j)); handleUDPTraffic(ports.get(j), datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
}).start(); }).start();
} }
gateway.on(MessageCreateEvent.class).subscribe(event -> { gateway.on(MessageCreateEvent.class).subscribe(event -> {
if (!FunctionLayer.DoStuff.isOccupied()) { FunctionLayer.DoStuff.doStuff(event, usernameBot, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
FunctionLayer.DoStuff.doStuff(event, usernameBot);
}
}); });
gateway.onDisconnect().block(); gateway.onDisconnect().block();
} }
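handleUDPTraffic and receiveAndSendPacket now take the Datahandler and both pipelines as parameters instead of reaching for the old Datahandler.instance singleton, but the wire protocol is unchanged: a datagram prefixed with clientmessage: gets answered on a fixed sibling port. A throwaway probe, assuming the bot runs locally and that 48470 replies on 48471 (the switch that maps ports is elided in the hunk above):

import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.nio.charset.StandardCharsets;

public class UdpProbe {
    public static void main(String[] args) throws Exception {
        InetAddress host = InetAddress.getByName("127.0.0.1"); // assumed: bot and probe on one machine
        byte[] msg = "clientmessage:hello there".getBytes(StandardCharsets.UTF_8);
        // Bind the port the handler is assumed to reply on, then fire at 48470.
        try (DatagramSocket socket = new DatagramSocket(48471)) {
            socket.send(new DatagramPacket(msg, msg.length, host, 48470));
            byte[] buf = new byte[4096];
            DatagramPacket reply = new DatagramPacket(buf, buf.length);
            socket.receive(reply); // blocks until receiveAndSendPacket answers
            System.out.println(new String(reply.getData(), 0, reply.getLength(), StandardCharsets.UTF_8));
        }
    }
}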