Some current attempts with coroutines, but more has to be changed again, I suppose

christian 2021-07-29 15:54:21 +02:00
parent 060fef41ad
commit 5d660187ba
6 changed files with 465 additions and 564 deletions
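The hunks below replace Thread/CountDownLatch scaffolding with runBlocking, coroutineScope, and launch, but nearly every loop join()s each job right after launching it, which serializes the work; that is presumably the "more has to be changed" part of the message above. A minimal Kotlin sketch of the fan-out shape structured concurrency is aiming for, with process() standing in for the per-item work (it is not part of this codebase):

    import kotlinx.coroutines.*

    // `process` stands in for the per-string work in the hunks below;
    // it is not part of the actual codebase.
    fun process(input: String) { /* per-item work */ }

    suspend fun processAll(inputs: Collection<String>) = coroutineScope {
        // Launch every job first...
        val jobs = inputs.map { launch(Dispatchers.Default) { process(it) } }
        // ...then wait for all of them. join()ing inside the launch loop,
        // as most hunks below do, runs the jobs one at a time instead.
        jobs.joinAll()
    }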


@@ -23,56 +23,61 @@ import edu.stanford.nlp.trees.TreebankLanguagePack
 import kotlinx.coroutines.*
 import java.io.IOException
 import java.io.UnsupportedEncodingException
-import java.lang.Runnable
-import java.net.DatagramPacket
-import java.net.DatagramSocket
-import java.net.InetAddress
-import java.net.SocketException
+import java.net.*
 import java.sql.SQLException
 import java.util.*
-import java.util.concurrent.*
+import java.util.concurrent.ConcurrentMap
+import java.util.concurrent.CountDownLatch
+import java.util.concurrent.TimeUnit
 import java.util.function.Consumer
 import java.util.logging.Level
 import java.util.logging.Logger
 import kotlin.collections.ArrayList
 /**
  *
  * @author install1
  */
-class Datahandler {
-    private val lHMSMX: LinkedHashMap<Any?, Any?> = LinkedHashMap<Any?, Any?>()
+public class Datahandler {
     private val stopwatch: Stopwatch
-    fun shiftReduceParserInitiate() {
+    fun shiftReduceParserInitiate() = runBlocking {
         val cdl = CountDownLatch(2)
-        Thread(Runnable {
-            try {
-                classifier = CRFClassifier.getClassifierNoExceptions(nerModel)
-            } catch (ex: ClassCastException) {
-                Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
+        coroutineScope {
+            val job = launch(Dispatchers.Default) {
+                propsSentiment.setProperty("parse.model", lexParserEnglishRNN)
+                propsSentiment.setProperty("sentiment.model", sentimentModel)
+                propsSentiment.setProperty("parse.maxlen", "90")
+                propsSentiment.setProperty("threads", "5")
+                propsSentiment.setProperty("pos.maxlen", "90")
+                propsSentiment.setProperty("tokenize.maxlen", "90")
+                propsSentiment.setProperty("ssplit.maxlen", "90")
+                propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,sentiment,lemma,stopword") //coref too expensive memorywise
+                propsSentiment.setProperty("customAnnotatorClass.stopword", "FunctionLayer.StopwordAnnotator")
+                propsSentiment.setProperty(StopwordAnnotator.STOPWORDS_LIST, customStopWordList)
+                propsSentiment.setProperty("tokenize.options", "untokenizable=firstKeep")
+                pipelineSentiment = StanfordCoreNLP(propsSentiment)
+                tagger = MaxentTagger(taggerPath)
+                lp = LexicalizedParser.loadModel(lexParserEnglishRNN, *options)
+                tlp = lp.getOp().langpack()
+                gsf = tlp.grammaticalStructureFactory()
+                cdl.countDown()
+                yield()
             }
-            cdl.countDown()
-        }).start()
-        Thread(Runnable {
-            propsSentiment.setProperty("parse.model", lexParserEnglishRNN)
-            propsSentiment.setProperty("sentiment.model", sentimentModel)
-            propsSentiment.setProperty("parse.maxlen", "90")
-            propsSentiment.setProperty("threads", "5")
-            propsSentiment.setProperty("pos.maxlen", "90")
-            propsSentiment.setProperty("tokenize.maxlen", "90")
-            propsSentiment.setProperty("ssplit.maxlen", "90")
-            propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,sentiment,lemma,stopword") //coref too expensive memorywise
-            propsSentiment.setProperty("customAnnotatorClass.stopword", "FunctionLayer.StopwordAnnotator")
-            propsSentiment.setProperty(StopwordAnnotator.STOPWORDS_LIST, customStopWordList)
-            propsSentiment.setProperty("tokenize.options", "untokenizable=firstKeep")
-            pipelineSentiment = StanfordCoreNLP(propsSentiment)
-            tagger = MaxentTagger(taggerPath)
-            cdl.countDown()
-        }).start()
-        lp = LexicalizedParser.loadModel(lexParserEnglishRNN, *options)
-        tlp = lp.getOp().langpack()
-        gsf = tlp.grammaticalStructureFactory()
+            job.join()
+        }
+        coroutineScope {
+            val job = launch(Dispatchers.Default) {
+                try {
+                    classifier = CRFClassifier.getClassifierNoExceptions(nerModel)
+                } catch (ex: ClassCastException) {
+                    Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
+                }
+                cdl.countDown()
+                yield()
+            }
+            job.join()
+        }
         try {
             cdl.await()
         } catch (ex: InterruptedException) {
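In the rewritten initializer above, coroutineScope already waits for its children, so the CountDownLatch kept from the Thread version is redundant. A hedged sketch, with loadParsers() and loadClassifier() as placeholders for the two launch bodies:

    import kotlinx.coroutines.*

    // Placeholders for the two init blocks in the hunk above.
    fun loadParsers() { /* props, tagger, lexicalized parser */ }
    fun loadClassifier() { /* CRFClassifier */ }

    suspend fun initModels() = coroutineScope {
        launch(Dispatchers.Default) { loadParsers() }
        launch(Dispatchers.Default) { loadClassifier() }
        // coroutineScope returns only after both children finish,
        // so no latch and no await() are needed.
    }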
@@ -98,7 +103,6 @@ class Datahandler {
         try {
             DataMapper.createTables()
             stringCache.putAll(cache)
-            // lHMSMX = DataMapper.getAllRelationScores();
         } catch (ex: CustomError) {
             Logger.getLogger(Datahandler::class.java
                     .name).log(Level.SEVERE, null, ex)
@@ -114,70 +118,62 @@ class Datahandler {
         }
     }
-    fun instantiateAnnotationMap() {
+    fun instantiateAnnotationMap() = runBlocking {
         if (!stringCache.isEmpty()) {
-            val Annotationspipeline = MapMaker().concurrencyLevel(4).makeMap<String?, Annotation>()
-            val AnnotationspipelineSentiment = MapMaker().concurrencyLevel(4).makeMap<String?, Annotation>()
-            runBlocking {
-                val job = launch(Dispatchers.Default) {
-                    for (str in stringCache.values) {
+            val Annotationspipeline = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
+            val AnnotationspipelineSentiment = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
+            coroutineScope {
+                for (str in stringCache.values) {
+                    val job = launch(Dispatchers.Default) {
                         val strAnno = Annotation(str)
                         strAnno.compact()
                         Annotationspipeline[str] = strAnno
                         val strAnno2 = Annotation(str)
                         strAnno2.compact()
                         AnnotationspipelineSentiment[str] = strAnno2
+                        yield()
                     }
-                    yield()
+                    job.join();
                 }
-                job.join();
             }
+            System.out.println("PRE getMultipleCoreDocumentsWaySuggestion lag")
             val coreDocumentpipelineMap = getMultipleCoreDocumentsWaySuggestion(stringCache.values, pipeline)
-            pipeline.annotate(Annotationspipeline.values)
-            pipelineSentiment!!.annotate(AnnotationspipelineSentiment.values)
-            runBlocking {
-                val job = launch(Dispatchers.Default) {
-                    for (i in Annotationspipeline.entries) {
+            //System.out.println("post getMultipleCoreDocumentsWaySuggestion instantiateAnnotationMap lag")
+            pipeline.annotate(Annotationspipeline.values, 4)
+            pipelineSentiment!!.annotate(AnnotationspipelineSentiment.values, 4)
+            //System.out.println("reached second job instantiateAnnotationMap lag");
+            coroutineScope {
+                for (i in Annotationspipeline.entries) {
+                    val job = launch(Dispatchers.Default) {
                         i.value.compact()
                         pipelineAnnotationCache[i.key] = i.value
+                        yield()
                     }
-                    yield()
+                    job.join();
                 }
-                job.join()
-            }
-            runBlocking {
-                val job = launch(Dispatchers.Default) {
-                    for (i in AnnotationspipelineSentiment.entries) {
+                for (i in AnnotationspipelineSentiment.entries) {
+                    val job = launch(Dispatchers.Default) {
                         i.value.compact()
                         pipelineSentimentAnnotationCache[i.key] = i.value
+                        yield()
                     }
-                    yield()
+                    job.join();
                 }
-                job.join()
             }
-            runBlocking {
-                val job = launch(Dispatchers.Default) {
-                    for (i in coreDocumentpipelineMap.entries) {
-                        coreDocumentAnnotationCache[i.key] = i.value
-                    }
-                    yield()
-                }
-                job.join()
+            System.out.println("post Annotationspipeline lag")
+            for (i in coreDocumentpipelineMap.entries) {
+                coreDocumentAnnotationCache[i.key] = i.value
             }
         }
     }
     private fun futuresReturnOverallEvaluation(similarityMatrixes: List<SimilarityMatrix?>): ConcurrentMap<Int?, String?> {
         var strmapreturn = MapMaker().concurrencyLevel(6).makeMap<Int?, String?>()
         if (!similarityMatrixes.isEmpty()) {
-            var iterator = 0
             for (SMX in similarityMatrixes) {
-                val scoreRelationNewMsgToRecentMsg = SMX!!.distance
-                if (scoreRelationNewMsgToRecentMsg > 0.0) {
-                    strmapreturn = addSMXToMapReturn(strmapreturn, SMX)
-                }
-                //System.out.println("scoreRelationNewMsgToRecentMsg: " + scoreRelationNewMsgToRecentMsg + "\niterator: " + iterator);
-                iterator++
+                strmapreturn = addSMXToMapReturn(strmapreturn, SMX)
             }
         }
         return strmapreturn
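instantiateAnnotationMap still joins each per-entry job inside the loop that launched it, so the cache fills run sequentially. A sketch of the same fill done concurrently, assuming a hypothetical compactAll() and stubbing annotation values as strings:

    import kotlinx.coroutines.*
    import java.util.concurrent.ConcurrentHashMap

    // compactAll is hypothetical; the real code holds Annotation instances.
    suspend fun compactAll(src: Map<String, String>, dst: ConcurrentHashMap<String, String>) = coroutineScope {
        src.entries.map { (key, value) ->
            launch(Dispatchers.Default) {
                // value.compact() would happen here in the real code
                dst[key] = value
            }
        }.joinAll() // wait once, after everything is in flight
    }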
@@ -199,25 +195,33 @@ class Datahandler {
         return strmapreturn
     }
+    private fun checkForNullValues(index: String?): Boolean {
+        if (jmweAnnotationCache[index] != null && pipelineAnnotationCache[index] != null
+                && pipelineSentimentAnnotationCache[index] != null &&
+                coreDocumentAnnotationCache[index] != null) {
+            return true;
+        }
+        return false;
+    }
-    private fun StrComparringNoSentenceRelationMap(strCacheLocal: ConcurrentMap<Int, String?>, strCollection: Collection<String?>, localJMWEMap: ConcurrentMap<String, Annotation>,
+    private suspend fun StrComparringNoSentenceRelationMap(strCacheLocal: ConcurrentMap<Int, String?>, strCollection: Collection<String?>, localJMWEMap: ConcurrentMap<String, Annotation>,
                                                     localPipelineAnnotation: ConcurrentMap<String?, Annotation>, localPipelineSentimentAnnotation: ConcurrentMap<String?, Annotation>,
                                                     localCoreDocumentMap: ConcurrentMap<String, CoreDocument>): List<SimilarityMatrix?> {
-        val distance_requirement = 10500.0
+        //TODO here
+        val distance_requirement = 15500.0
         val prefix_size = 150
         val smxReturnList: ArrayList<SimilarityMatrix> = ArrayList<SimilarityMatrix>()
-        runBlocking {
-            val job = launch(Dispatchers.Default) {
-                for (j in strCollection) {
+        coroutineScope {
+            for (j in strCollection) {
+                val job = launch(Dispatchers.Default) {
                     for (i in strCollection) {
                         if (j != i) {
                             val SMXInit = SimilarityMatrix(j, i)
                             val sentimentCacheStr1 = sentimentCachingMap.getOrDefault(i, null)
                             val sentimentCacheStr = sentimentCachingMap.getOrDefault(j, null)
                             var sentimentAnalyzerTest: SentimentAnalyzerTest? = null
-                            if (stringCache.size < prefix_size) {
+                            val checkedVal: Boolean = checkForNullValues(i)
+                            if (stringCache.size < prefix_size || !checkedVal) {
                                 sentimentAnalyzerTest = SentimentAnalyzerTest(j, i, SMXInit,
                                         localJMWEMap[j], localJMWEMap[i], localPipelineAnnotation[j],
                                         localPipelineAnnotation[i], localPipelineSentimentAnnotation[j],
@@ -230,67 +234,71 @@ class Datahandler {
                                         pipelineSentimentAnnotationCache[i], localCoreDocumentMap[j],
                                         coreDocumentAnnotationCache[i], sentimentCacheStr, sentimentCacheStr1)
                             }
-                            val call = sentimentAnalyzerTest.call();
+                            val call = sentimentAnalyzerTest.callSMX();
                             if (call != null && call.distance > distance_requirement) {
                                 smxReturnList.add(call)
                             }
                         }
                     }
+                    yield()
                 }
-                yield()
+                job.join()
             }
-            job.join()
         }
         return smxReturnList
     }
-    private fun stringIteratorComparator(strmap: ConcurrentMap<Int?, String?>,
+    private suspend fun stringIteratorComparator(strmap: ConcurrentMap<Int?, String?>,
                                          strCacheLocal: ConcurrentMap<Int, String?>, localJMWEMap: ConcurrentMap<String, Annotation>,
                                          localPipelineAnnotation: ConcurrentMap<String?, Annotation>, localPipelineSentimentAnnotation: ConcurrentMap<String?, Annotation>,
                                          localCoreDocumentMap: ConcurrentMap<String, CoreDocument>): ConcurrentMap<Int?, String?> {
         //System.out.println("strmap siuze: " + strmap.size());
         val ComparringNoSentenceRelationMap: List<SimilarityMatrix> = StrComparringNoSentenceRelationMap(strCacheLocal, strmap.values,
                 localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap) as List<SimilarityMatrix>
         Collections.sort(ComparringNoSentenceRelationMap, Comparator<SimilarityMatrix> { e1: SimilarityMatrix, e2: SimilarityMatrix -> e1.primaryString.compareTo(e2.primaryString) })
-        //System.out.println("strmapreturn size: " + strmapreturn.size());
+        System.out.println("ComparringNoSentenceRelationMap size: " + ComparringNoSentenceRelationMap.size)
         return futuresReturnOverallEvaluation(ComparringNoSentenceRelationMap)
     }
-    private fun removeNonSensicalStrings(strmap: ConcurrentMap<Int?, String?>): ConcurrentMap<Int?, String?> {
+    private suspend fun removeNonSensicalStrings(strmap: ConcurrentMap<Int?, String?>): ConcurrentMap<Int?, String?> {
         val strCacheLocal = stringCache
         val localJMWEMap = getMultipleJMWEAnnotation(strmap.values)
         val localPipelineAnnotation = getMultiplePipelineAnnotation(strmap.values)
+        System.out.println("str size post getMultiplePipelineAnnotation: " + strmap.size)
         val localPipelineSentimentAnnotation = getMultiplePipelineSentimentAnnotation(strmap.values)
         val localCoreDocumentMap = getMultipleCoreDocumentsWaySuggestion(strmap.values, pipeline)
+        System.out.println("strmap size pre stringIteratorComparator: " + strmap.size)
         return stringIteratorComparator(strmap, strCacheLocal, localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap)
     }
-    @Synchronized
-    @Throws(CustomError::class)
-    fun checkIfUpdateStrings() {
+    fun checkIfUpdateStrings() = runBlocking {
         if (stopwatch.elapsed(TimeUnit.SECONDS) >= EXPIRE_TIME_IN_SECONDS || !stopwatch.isRunning) {
             var str = MessageResponseHandler.getStr()
             println("str size: " + str.size)
             str = filterContent(str)
+            System.out.println("str size post filtercontent: " + str.size)
             str = removeNonSensicalStrings(str)
-            //System.out.println("removeNonSensicalStrings str size POST: " + str.size() + "\n");
+            System.out.println("removeNonSensicalStrings str size POST: " + str.size + "\n")
             str = annotationCacheUpdate(str)
             println("""
                 annotationCacheUpdate str size POST: ${str.size}
                 """.trimIndent())
             val strf = str
             if (!stringCache.isEmpty()) {
-                Thread(Runnable {
-                    try {
-                        DataMapper.InsertMYSQLStrings(strf)
-                    } catch (ex: CustomError) {
-                        Logger.getLogger(Datahandler::class.java
-                                .name).log(Level.SEVERE, null, ex)
+                coroutineScope {
+                    val job = launch(Dispatchers.IO) {
+                        try {
+                            DataMapper.InsertMYSQLStrings(strf)
+                        } catch (ex: CustomError) {
+                            Logger.getLogger(Datahandler::class.java
+                                    .name).log(Level.SEVERE, null, ex)
+                        }
+                        MessageResponseHandler.setStr(MapMaker().concurrencyLevel(6).makeMap())
+                        yield()
                     }
-                    MessageResponseHandler.setStr(MapMaker().concurrencyLevel(6).makeMap())
-                }).start()
+                    job.join()
+                }
             } else {
                 try {
                     DataMapper.InsertMYSQLStrings(strf)
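checkIfUpdateStrings now runs the MySQL insert on Dispatchers.IO, the dispatcher meant for blocking calls such as JDBC, but the coroutineScope plus join() still makes the caller wait, unlike the old fire-and-forget Thread. If the old behaviour is wanted, a long-lived scope could own the insert; insertStrings() is a placeholder for DataMapper.InsertMYSQLStrings:

    import kotlinx.coroutines.*

    // Placeholder for DataMapper.InsertMYSQLStrings.
    fun insertStrings(rows: List<String>) { /* blocking JDBC insert */ }

    // One long-lived scope owns background inserts; failures do not cancel it.
    val dbScope = CoroutineScope(SupervisorJob() + Dispatchers.IO)

    fun insertAsync(rows: List<String>) {
        dbScope.launch {
            runCatching { insertStrings(rows) }
                .onFailure { ex -> println("insert failed: $ex") }
        }
    }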
@@ -317,63 +325,59 @@ class Datahandler {
         return str
     }
-    private fun getResponseFutures(strF: String): String {
+    private suspend fun getResponseFutures(strF: String): String {
         val values_copy: List<String?> = ArrayList(stringCache.values)
-        Collections.shuffle(values_copy)
+        Collections.sort<String>(values_copy) { o1, o2 -> o2.length - o1.length }
         var preRelationUserCounters = -155000.0
         val concurrentRelations: MutableList<String?> = arrayListOf()
-        runBlocking {
-            val job = launch(Dispatchers.Default) {
-                for (str1 in values_copy) {
-                    if (strF != str1) {
-                        val sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null)
-                        val worker: Callable<SimilarityMatrix> = SentimentAnalyzerTest(strF, str1, SimilarityMatrix(strF, str1),
+        val SB = StringBuilder()
+        coroutineScope {
+            for (str1 in values_copy) {
+                if (strF != str1) {
+                    val job = launch(Dispatchers.Default) {
+                        var sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null)
+                        var sentimentAnalyzerTest = SentimentAnalyzerTest(strF, str1, SimilarityMatrix(strF, str1),
                                 strAnnoJMWE, jmweAnnotationCache[str1], strAnno,
                                 pipelineAnnotationCache[str1], strAnnoSentiment,
                                 pipelineSentimentAnnotationCache[str1], coreDoc, coreDocumentAnnotationCache[str1],
                                 null, sentimentCacheStr1)
-                        try {
-                            val getSMX = worker.call()
-                            if (getSMX != null) {
-                                val scoreRelationLastUserMsg = getSMX.distance
-                                if (scoreRelationLastUserMsg > preRelationUserCounters) {
-                                    preRelationUserCounters = scoreRelationLastUserMsg
-                                    concurrentRelations.add(getSMX.secondaryString)
-                                }
+                        var getSMX: SimilarityMatrix = sentimentAnalyzerTest.callSMX()
+                        if (getSMX != null) {
+                            val scoreRelationLastUserMsg = getSMX.distance
+                            if (scoreRelationLastUserMsg > preRelationUserCounters) {
+                                preRelationUserCounters = scoreRelationLastUserMsg
+                                concurrentRelations.add(getSMX.secondaryString)
                             }
-                        } catch (ex: Exception) {
-                            Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
                         }
+                        yield()
                     }
+                    job.join()
                 }
-                yield()
             }
-            job.join()
-        }
-        val SB = StringBuilder()
-        val randomLenghtPermit = strF.length * (Math.random() * Math.random() * Math.random() * 5)
-        Collections.reverse(concurrentRelations)
-        if (concurrentRelations.isEmpty()) {
-            return "failure, preventing stuckness"
-        }
-        val firstRelation = concurrentRelations[0]
-        runBlocking {
-            val job = launch(Dispatchers.Default) {
+            val randomLenghtPermit = strF.length * (Math.random() * Math.random() * Math.random() * (Math.random() * 10))
+            Collections.reverse(concurrentRelations)
+            if (!concurrentRelations.isEmpty()) {
+                val firstRelation = concurrentRelations[0]
+                val job1 = launch(Dispatchers.Default) {
                     for (secondaryRelation in concurrentRelations) {
                         if (SB.toString().length > randomLenghtPermit && !SB.toString().isEmpty()) {
                             break
                         }
                         val append = appendToString(firstRelation, secondaryRelation)
                         if (append) {
                             SB.append(secondaryRelation).append(" ")
                         }
                     }
                     yield()
                 }
-            job.join()
+                job1.join()
+            }
         }
+        if (SB.toString().isEmpty()) {
+            return "failure, preventing stuckness"
+        }
         return SB.toString()
     }
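getResponseFutures now orders candidates longest-first with a hand-written Comparator instead of shuffling. The same ordering in idiomatic Kotlin, shown on throwaway data:

    val valuesCopy = listOf("short", "a much longer candidate", "mid-sized")
    // Same longest-first ordering, without a hand-written Comparator:
    val longestFirst = valuesCopy.sortedByDescending { it.length }
    // longestFirst == [a much longer candidate, mid-sized, short]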
@@ -387,57 +391,73 @@ class Datahandler {
         } else false
     }
-    @Throws(CustomError::class)
     fun getResponseMsg(str: String): String {
-        val strF = trimString(str)
-        getSingularAnnotation(strF)
-        return getResponseFutures(strF)
+        val responseFutures: String
+        runBlocking {
+            val strF = trimString(str)
+            getSingularAnnotation(strF)
+            responseFutures = getResponseFutures(strF)
+        }
+        return responseFutures
     }
-    fun getSingularAnnotation(str: String?) {
-        strAnno = Annotation(str)
-        strAnno!!.compact()
-        pipeline.annotate(strAnno)
-        strAnnoSentiment = Annotation(str)
-        strAnnoSentiment!!.compact()
-        pipelineSentiment!!.annotate(strAnnoSentiment)
-        val notactualList: MutableList<String?> = arrayListOf()
-        notactualList.add(str)
-        val jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(notactualList)
-        strAnnoJMWE = jmweAnnotation.values.iterator().next()
-        strAnnoJMWE.compact()
-        val coreDocument = CoreDocument(str)
-        pipeline.annotate(coreDocument)
-        coreDoc = coreDocument
+    suspend fun getSingularAnnotation(str: String?) {
+        coroutineScope {
+            val job = launch(Dispatchers.Default) {
+                strAnno = Annotation(str)
+                strAnno!!.compact()
+                pipeline.annotate(strAnno)
+                yield()
+            }
+            job.join()
+            val job1 = launch(Dispatchers.Default) {
+                strAnnoSentiment = Annotation(str)
+                strAnnoSentiment!!.compact()
+                pipelineSentiment!!.annotate(strAnnoSentiment)
+                val notactualList: MutableList<String?> = arrayListOf()
+                notactualList.add(str)
+                val jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(notactualList)
+                strAnnoJMWE = jmweAnnotation.values.iterator().next()
+                strAnnoJMWE.compact()
+                yield()
+            }
+            job1.join()
+            val job3 = launch(Dispatchers.Default) {
+                val coreDocument = CoreDocument(str)
+                pipeline.annotate(coreDocument)
+                coreDoc = coreDocument
+                yield()
+            }
+            job3.join()
+        }
     }
     private fun getScoreRelationStrF(str: String?, mostRecentMsg: String?): Double {
         val SMX = SimilarityMatrix(str, mostRecentMsg)
         val cacheSentiment1 = sentimentCachingMap.getOrDefault(str, null)
         val cacheSentiment2 = sentimentCachingMap.getOrDefault(mostRecentMsg, null)
-        val worker: Callable<SimilarityMatrix> = SentimentAnalyzerTest(str, mostRecentMsg, SMX,
+        val sentimentAnalyzerTest = SentimentAnalyzerTest(str, mostRecentMsg, SMX,
                 strAnnoJMWE, jmweAnnotationCache[mostRecentMsg], strAnno,
                 pipelineAnnotationCache[mostRecentMsg], strAnnoSentiment,
-                pipelineSentimentAnnotationCache[mostRecentMsg], coreDoc, coreDocumentAnnotationCache[mostRecentMsg], cacheSentiment1, cacheSentiment2)
-        var callSMX: SimilarityMatrix? = null
-        try {
-            callSMX = worker.call()
-        } catch (ex: Exception) {
-            Logger.getLogger(Datahandler::class.java
-                    .name).log(Level.SEVERE, null, ex)
-        }
-        return callSMX?.distance ?: 0.0
+                pipelineSentimentAnnotationCache[mostRecentMsg], coreDoc,
+                coreDocumentAnnotationCache[mostRecentMsg],
+                cacheSentiment1, cacheSentiment2)
+        val callSMX = sentimentAnalyzerTest.callSMX()
+        return callSMX.distance ?: 0.0
     }
-    private fun annotationCacheUpdate(strmap: ConcurrentMap<Int?, String?>): ConcurrentMap<Int?, String?> {
+    suspend private fun annotationCacheUpdate(strmap: ConcurrentMap<Int?, String?>): ConcurrentMap<Int?, String?> {
         val jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strmap.values)
         for ((key, value) in jmweAnnotation) {
             jmweAnnotationCache[key] = value
         }
-        val Annotationspipeline = MapMaker().concurrencyLevel(6).makeMap<String?, Annotation>()
-        val AnnotationspipelineSentiment = MapMaker().concurrencyLevel(6).makeMap<String?, Annotation>()
+        val Annotationspipeline = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
+        val AnnotationspipelineSentiment = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
         val coreDocumentpipelineMap = getMultipleCoreDocumentsWaySuggestion(strmap.values, pipeline)
-        runBlocking {
+        coroutineScope {
             val job = launch(Dispatchers.Default) {
                 for (str in strmap.values) {
                     val strAnno1 = Annotation(str)
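getSingularAnnotation launches three jobs but joins each before starting the next, so the steps still run one after another. If the three annotations really are independent until their results are read, they could be launched together; a sketch with placeholder step functions:

    import kotlinx.coroutines.*

    // Placeholders for the three launch bodies above.
    fun annotatePlain() {}
    fun annotateSentimentAndJmwe() {}
    fun annotateCoreDocument() {}

    suspend fun annotateSingular() = coroutineScope {
        listOf(
            launch(Dispatchers.Default) { annotatePlain() },
            launch(Dispatchers.Default) { annotateSentimentAndJmwe() },
            launch(Dispatchers.Default) { annotateCoreDocument() }
        ).joinAll() // all three proceed concurrently
    }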
@@ -448,11 +468,11 @@ class Datahandler {
                 }
                 yield()
             }
+            pipeline.annotate(Annotationspipeline.values, 5)
+            pipelineSentiment!!.annotate(AnnotationspipelineSentiment.values, 5)
             job.join()
         }
-        pipeline.annotate(Annotationspipeline.values)
-        pipelineSentiment!!.annotate(AnnotationspipelineSentiment.values)
-        runBlocking {
+        coroutineScope {
             val job = launch(Dispatchers.Default) {
                 for (pipelineEntry in Annotationspipeline.entries) {
                     if (pipelineEntry != null) {
@@ -463,18 +483,7 @@ class Datahandler {
             }
             job.join()
         }
-        runBlocking {
-            val job = launch(Dispatchers.Default) {
-                for (pipelineEntry in AnnotationspipelineSentiment.entries) {
-                    if (pipelineEntry != null) {
-                        pipelineSentimentAnnotationCache[pipelineEntry.key] = pipelineEntry.value
-                    }
-                }
-                yield()
-            }
-            job.join()
-        }
-        runBlocking {
+        coroutineScope {
             val job = launch(Dispatchers.Default) {
                 for (coreDocumentEntry in coreDocumentpipelineMap.entries) {
                     coreDocumentAnnotationCache[coreDocumentEntry.key] = coreDocumentEntry.value
@@ -483,50 +492,20 @@ class Datahandler {
             }
             job.join()
         }
+        coroutineScope {
+            val job1 = launch(Dispatchers.Default) {
+                for (pipelineEntry in AnnotationspipelineSentiment.entries) {
+                    if (pipelineEntry != null) {
+                        pipelineSentimentAnnotationCache[pipelineEntry.key] = pipelineEntry.value
+                    }
+                }
+                yield()
+            }
+            job1.join()
+        }
         return strmap
     }
-    val messageOverHead: Int
-        get() = stringCache.values.size - stringCache.values.size / 10
-    fun update_autismo_socket_msg() {
-        try {
-            try {
-                DatagramSocket(48480).use { serverSocket ->
-                    DatagramSocket(48471).use { serverSocket1 ->
-                        val receiveData = ByteArray(4096)
-                        val IPAddress = InetAddress.getByName("135.125.188.157") //later moving autism bot 1 and 3 to OVH from SYS
-                        var receivePacket = DatagramPacket(receiveData, receiveData.size)
-                        while (true) {
-                            serverSocket.receive(receivePacket)
-                            var sentence = String(receivePacket.data, 0, receivePacket.length)
-                            sentence = sentence.replace("clientmessage:", "")
-                            var getResponseMsg = getResponseMsg(sentence)
-                            var sendData = getResponseMsg.toByteArray(charset("UTF-8"))
-                            var sendPacket = DatagramPacket(sendData, sendData.size, IPAddress, 48479)
-                            serverSocket.send(sendPacket)
-                            receivePacket = DatagramPacket(receiveData, receiveData.size)
-                            serverSocket1.receive(receivePacket)
-                            sentence = String(receivePacket.data, 0, receivePacket.length)
-                            sentence = sentence.replace("clientmessage:", "")
-                            getResponseMsg = getResponseMsg(sentence)
-                            sendData = getResponseMsg.toByteArray(charset("UTF-8"))
-                            sendPacket = DatagramPacket(sendData, sendData.size, IPAddress, 48476)
-                            serverSocket1.send(sendPacket)
-                        }
-                    }
-                }
-            } catch (ex: CustomError) {
-                Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
-            }
-        } catch (ex: SocketException) {
-            Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
-        } catch (ex: UnsupportedEncodingException) {
-            Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
-        } catch (ex: IOException) {
-            Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
-        }
-    }
     private class AnnotationCollector<T> : Consumer<T> {
         val annotationsT: MutableList<T?> = arrayListOf()
@@ -604,160 +583,56 @@ class Datahandler {
         props.setProperty("ner.combinationMode", "HIGH_RECALL")
         props.setProperty("regexner.ignorecase", "true")
         props.setProperty("ner.fine.regexner.ignorecase", "true")
-        props.setProperty("tokenize.options", "untokenizable=firstDelete")
+        props.setProperty("tokenize.options", "untokenizable=firstKeep")
         return StanfordCoreNLP(props)
     }
     @JvmStatic
-    fun getClassifier(): AbstractSequenceClassifier<CoreLabel>? {
+    fun getClassifier(): AbstractSequenceClassifier<CoreLabel> {
         return classifier
     }
-    fun setClassifier(classifier: AbstractSequenceClassifier<CoreLabel>?) {
-        if (classifier != null) {
-            Companion.classifier = classifier
-        }
-    }
     private fun getMultipleJMWEAnnotation(str: Collection<String?>): ConcurrentMap<String, Annotation> {
         return PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(str)
     }
     private fun getMultiplePipelineAnnotation(str: Collection<String?>): ConcurrentMap<String?, Annotation> {
-        val pipelineAnnotationMap = MapMaker().concurrencyLevel(2).makeMap<String?, Annotation>()
+        val pipelineAnnotationMap = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
         for (str1 in str) {
             val strAnno1 = Annotation(str1)
             pipelineAnnotationMap[str1] = strAnno1
         }
-        pipeline.annotate(pipelineAnnotationMap.values)
+        pipeline.annotate(pipelineAnnotationMap.values, 5)
         return pipelineAnnotationMap
     }
     private fun getMultiplePipelineSentimentAnnotation(str: Collection<String?>): ConcurrentMap<String?, Annotation> {
-        val pipelineAnnotationMap = MapMaker().concurrencyLevel(2).makeMap<String?, Annotation>()
+        val pipelineAnnotationMap = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
         for (str1 in str) {
             val strAnno1 = Annotation(str1)
             pipelineAnnotationMap[str1] = strAnno1
         }
-        pipelineSentiment!!.annotate(pipelineAnnotationMap.values)
+        pipelineSentiment?.annotate(pipelineAnnotationMap.values, 5)
         return pipelineAnnotationMap
     }
     fun filterContent(str: ConcurrentMap<Int?, String?>): ConcurrentMap<Int?, String?> {
-        val strlistreturn = MapMaker().concurrencyLevel(2).makeMap<Int?, String?>()
-        str.values.forEach(Consumer {
-            var str1: String = ""
-            if (!str1.isEmpty() && str1.length > 3) {
-                str1 = str1.trim { it <= ' ' }
-                if (str1.contains("PM*")) {
-                    str1 = str1.substring(str1.indexOf("PM*") + 3)
-                }
-                if (str1.contains("AM*")) {
-                    str1 = str1.substring(str1.indexOf("AM*") + 3)
-                }
-                /*
-                if (str1.contains("?") || str1.contains("°"))
-                {
-                    if (!str1.contains("http"))
-                    {
-                        str1 = str1.replace("?", " <:wlenny:514861023002624001> ");
-                        str1 = str1.replace("°", " <:wlenny:514861023002624001> ");
-                    }
-                }
-                */if (str1.contains("(Counter-Terrorist)")) {
-                    str1 = str1.replace("(Counter-Terrorist)", " ")
-                }
-                if (str1.contains("(Terrorist)")) {
-                    str1 = str1.replace("(Terrorist)", " ")
-                }
-                if (str1.contains("(Spectator)")) {
-                    str1 = str1.replace("(Spectator)", " ")
-                }
-                if (str1.contains("*DEAD*")) {
-                    str1 = str1.replace("*DEAD*", " ")
-                }
-                if (str1.contains("{red}")) {
-                    str1 = str1.replace("{red}", " ")
-                }
-                if (str1.contains("{orange}")) {
-                    str1 = str1.replace("{orange}", " ")
-                }
-                if (str1.contains("{yellow}")) {
-                    str1 = str1.replace("{yellow}", " ")
-                }
-                if (str1.contains("{green}")) {
-                    str1 = str1.replace("{green}", " ")
-                }
-                if (str1.contains("{lightblue}")) {
-                    str1 = str1.replace("{lightblue}", " ")
-                }
-                if (str1.contains("{blue}")) {
-                    str1 = str1.replace("{blue}", " ")
-                }
-                if (str1.contains("{purple}")) {
-                    str1 = str1.replace("{purple}", " ")
-                }
-                if (str1.contains("{white}")) {
-                    str1 = str1.replace("{white}", " ")
-                }
-                if (str1.contains("{fullblue}")) {
-                    str1 = str1.replace("{fullblue}", " ")
-                }
-                if (str1.contains("{cyan}")) {
-                    str1 = str1.replace("{cyan}", " ")
-                }
-                if (str1.contains("{lime}")) {
-                    str1 = str1.replace("{lime}", " ")
-                }
-                if (str1.contains("{deeppink}")) {
-                    str1 = str1.replace("{deeppink}", " ")
-                }
-                if (str1.contains("{slategray}")) {
-                    str1 = str1.replace("{slategray}", " ")
-                }
-                if (str1.contains("{dodgerblue}")) {
-                    str1 = str1.replace("{dodgerblue}", " ")
-                }
-                if (str1.contains("{black}")) {
-                    str1 = str1.replace("{black}", " ")
-                }
-                if (str1.contains("{orangered}")) {
-                    str1 = str1.replace("{orangered}", " ")
-                }
-                if (str1.contains("{darkorchid}")) {
-                    str1 = str1.replace("{darkorchid}", " ")
-                }
-                if (str1.contains("{pink}")) {
-                    str1 = str1.replace("{pink}", " ")
-                }
-                if (str1.contains("{lightyellow}")) {
-                    str1 = str1.replace("{lightyellow}", " ")
-                }
-                if (str1.contains("{chocolate}")) {
-                    str1 = str1.replace("{chocolate}", " ")
-                }
-                if (str1.contains("{beige}")) {
-                    str1 = str1.replace("{beige}", " ")
-                }
-                if (str1.contains("{azure}")) {
-                    str1 = str1.replace("{azure}", " ")
-                }
-                if (str1.contains("{yellowgreen}")) {
-                    str1 = str1.replace("{yellowgreen}", " ")
-                }
-                str1 = str1.trim { it <= ' ' }
-                if (str1.length > 2 && !str1.startsWith("!")) {
-                    strlistreturn[strlistreturn.size] = str1
-                }
-            }
-        })
+        val strlistreturn = MapMaker().concurrencyLevel(5).makeMap<Int?, String?>()
+        for (str1: String? in str.values) {
+            if (!str1?.isEmpty()!! && str1.length > 3) {
+                var str1Local: String = str1.trim();
+                if (str1Local.length > 2 && !str1Local.startsWith("!")) {
+                    strlistreturn[strlistreturn.size] = str1Local
+                }
+            }
+        }
         return strlistreturn
     }
-    fun getMultipleCoreDocumentsWaySuggestion(str: Collection<String?>, localNLP: StanfordCoreNLP): ConcurrentMap<String, CoreDocument> {
+    suspend fun getMultipleCoreDocumentsWaySuggestion(str: Collection<String?>, localNLP: StanfordCoreNLP): ConcurrentMap<String, CoreDocument> {
         val annCollector: AnnotationCollector<Annotation?> = AnnotationCollector<Annotation?>()
         val annotationreturnMap = MapMaker().concurrencyLevel(6).makeMap<String, CoreDocument>()
-        runBlocking {
+        coroutineScope {
             val job = launch(Dispatchers.Default) {
                 for (exampleString in str) {
                     localNLP.annotate(Annotation(exampleString), annCollector)
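The rewritten filterContent drops the old PM*/AM*, team-tag, and colour-tag cleanup entirely. If that cleanup is still wanted, the whole if-chain collapses to one regex built from the removed tag set; a sketch:

    // chatNoise covers exactly the tag set from the removed if-chain.
    val chatNoise = Regex("""\*DEAD\*|\((Counter-Terrorist|Terrorist|Spectator)\)|\{[a-z]+\}""")

    fun stripChatNoise(line: String): String =
        line.replace(chatNoise, " ").trim()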
@@ -772,7 +647,7 @@ class Datahandler {
             } catch (ex: InterruptedException) {
                 Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
             }
-            runBlocking {
+            coroutineScope {
                 val job1 = launch(Dispatchers.Default) {
                     for (ann in annCollector.annotationsT) {
                         if (ann != null) {
@@ -796,9 +671,9 @@ class Datahandler {
         init {
             stopwatch = Stopwatch.createUnstarted()
-            jmweAnnotationCache = MapMaker().concurrencyLevel(4).makeMap<String, Annotation>()
-            pipelineAnnotationCache = MapMaker().concurrencyLevel(4).makeMap<String, Annotation>()
-            pipelineSentimentAnnotationCache = MapMaker().concurrencyLevel(4).makeMap<String, Annotation>()
+            jmweAnnotationCache = MapMaker().concurrencyLevel(5).makeMap<String, Annotation>()
+            pipelineAnnotationCache = MapMaker().concurrencyLevel(5).makeMap<String, Annotation>()
+            pipelineSentimentAnnotationCache = MapMaker().concurrencyLevel(5).makeMap<String, Annotation>()
             coreDocumentAnnotationCache = MapMaker().concurrencyLevel(5).makeMap<String, CoreDocument>()
         }
     }


@@ -9,17 +9,18 @@ import PresentationLayer.DiscordHandler;
 import discord4j.core.event.domain.message.MessageCreateEvent;
 import discord4j.core.object.entity.User;
 import discord4j.core.object.entity.channel.TextChannel;
 import java.math.BigInteger;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 /**
+ *
  * @author install1
  */
 public class DoStuff {
@@ -59,8 +60,7 @@ public class DoStuff {
         List<User> blockLast = event.getMessage().getUserMentions().buffer().blockLast();
         String content = event.getMessage().getContent();
         if (!channelpermissionsDenied) {
-            if (blockLast != null)
-            {
+            if (blockLast != null) {
                 for (User user : blockLast) {
                     content = content.replace(user.getId().asString(), "");
                 }
@@ -68,35 +68,24 @@ public class DoStuff {
             MessageResponseHandler.getMessage(content);
         }
         boolean mentionedBot = false;
-        if (blockLast != null){
-            for (User user : blockLast)
-            {
-                if (user.getUsername().equals(usernameBot))
-                {
+        if (blockLast != null) {
+            for (User user : blockLast) {
+                if (user.getUsername().equals(usernameBot)) {
                     mentionedBot = true;
                     break;
                 }
             }
         }
         if (mentionedBot || channelName.contains("general-autism")) {
-            try {
-                String ResponseStr;
-                ResponseStr = MessageResponseHandler.selectReponseMessage(content, username);
-                if (!ResponseStr.isEmpty()) {
-                    System.out.print("\nResponseStr3: " + ResponseStr + "\n");
-                    event.getMessage().getChannel().block().createMessage(ResponseStr).block();
-                }
-            } catch (CustomError ex) {
-                Logger.getLogger(DoStuff.class.getName()).log(Level.SEVERE, null, ex);
+            String ResponseStr;
+            ResponseStr = MessageResponseHandler.selectReponseMessage(content, username);
+            if (!ResponseStr.isEmpty()) {
+                System.out.print("\nResponseStr3: " + ResponseStr + "\n");
+                event.getMessage().getChannel().block().createMessage(ResponseStr).block();
             }
         }
         new Thread(() -> {
-            try {
-                Datahandler.instance.checkIfUpdateStrings();
-            } catch (CustomError ex) {
-                Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);
-            }
+            Datahandler.instance.checkIfUpdateStrings();
         }).start();
         occupied = false;
     }
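checkIfUpdateStrings() is bridged with runBlocking on the Kotlin side, which is why DoStuff can now call it from a plain Thread without the old try/catch. A sketch of that bridging pattern, with the suspend implementation stubbed out:

    import kotlinx.coroutines.runBlocking

    // Placeholder for the suspend-based pipeline behind checkIfUpdateStrings().
    suspend fun checkIfUpdateStringsImpl() { /* filter, score, persist */ }

    // Blocking entry point that Java callers can invoke from their own Thread.
    fun checkIfUpdateStringsBlocking() = runBlocking { checkIfUpdateStringsImpl() }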


@@ -8,21 +8,30 @@ package FunctionLayer;
 import com.google.common.collect.MapMaker;
 import edu.stanford.nlp.pipeline.CoreDocument;
 import edu.stanford.nlp.pipeline.CoreEntityMention;
 import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.ConcurrentMap;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 /**
- *
  * @author install1
 */
 public class MessageResponseHandler {
-    private static ConcurrentMap<Integer, String> str = new MapMaker().concurrencyLevel(2).makeMap();
+    private static ConcurrentMap<Integer, String> str = new MapMaker().concurrencyLevel(6).makeMap();
     public static ConcurrentMap<Integer, String> getStr() {
+        ArrayList<String> arrayList = new ArrayList(str.values());
+        Collections.sort(arrayList, (o1, o2) -> o2.length() - o1.length());
+        int iterator = 0;
+        for (String str1 : arrayList) {
+            str.put(iterator, str1);
+            iterator++;
+        }
         return str;
     }
@@ -43,7 +52,7 @@ public class MessageResponseHandler {
         }
     }
-    public static String selectReponseMessage(String toString, String personName) throws CustomError {
+    public static String selectReponseMessage(String toString, String personName) {
         String getResponseMsg = Datahandler.instance.getResponseMsg(toString);
         getResponseMsg = checkPersonPresentInSentence(personName, getResponseMsg, toString);
         return getResponseMsg;
@@ -87,9 +96,4 @@ public class MessageResponseHandler {
         }
         return responseMsg;
     }
-    public static int getOverHead() {
-        int getResponseMsgOverHead = Datahandler.instance.getMessageOverHead();
-        return getResponseMsgOverHead;
-    }
 }
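getStr() now reorders the backing map longest-first by rewriting keys 0..n-1 on every call. A Kotlin sketch of the intended effect (the function is hypothetical, and rewriting a shared map inside a getter is racy if callers overlap):

    // Hypothetical; documents what the new getStr() does to its backing map.
    fun reindexLongestFirst(src: MutableMap<Int, String>) {
        val ordered = src.values.sortedByDescending { it.length }
        ordered.forEachIndexed { i, s -> src[i] = s } // keys are already 0..n-1
    }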


@@ -24,6 +24,7 @@ import edu.stanford.nlp.ling.JMWEAnnotation;
 import edu.stanford.nlp.pipeline.Annotation;
 import edu.stanford.nlp.pipeline.StanfordCoreNLP;
 import edu.stanford.nlp.util.CoreMap;
 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -33,7 +34,6 @@ import java.util.Properties;
 import java.util.concurrent.ConcurrentMap;
 /**
- *
  * @author install1
 */
 //maybe not public?
@@ -65,12 +65,13 @@ public class PipelineJMWESingleton {
             throw new RuntimeException("unable to open IMWEIndex index: " + e + "\n");
         }
         IMWEDetector detector = getDetector(index, detectorName);
-        ConcurrentMap<String, Annotation> returnAnnotations = new MapMaker().concurrencyLevel(2).makeMap();
+        ConcurrentMap<String, Annotation> returnAnnotations = new MapMaker().concurrencyLevel(5).makeMap();
         strvalues.forEach(str -> {
             Annotation annoStr = new Annotation(str);
             returnAnnotations.put(str, annoStr);
         });
-        localNLP.annotate(returnAnnotations.values());
+        localNLP.annotate(returnAnnotations.values(), 4);
         returnAnnotations.values().parallelStream().forEach(annoStr -> {
             for (CoreMap sentence : annoStr.get(CoreAnnotations.SentencesAnnotation.class)) {
                 List<IMWE<IToken>> mwes = getjMWEInSentence(sentence, index, detector, verbose);
@@ -85,8 +86,8 @@ public class PipelineJMWESingleton {
         Properties propsJMWE;
         propsJMWE = new Properties();
         propsJMWE.setProperty("annotators", "tokenize,ssplit,pos,lemma");
-        propsJMWE.setProperty("tokenize.options", "untokenizable=firstDelete");
-        propsJMWE.setProperty("threads", "25");
+        propsJMWE.setProperty("tokenize.options", "untokenizable=firstKeep");
+        propsJMWE.setProperty("threads", "5");
         propsJMWE.setProperty("pos.maxlen", "90");
         propsJMWE.setProperty("tokenize.maxlen", "90");
         propsJMWE.setProperty("ssplit.maxlen", "90");
@@ -124,7 +125,7 @@ public class PipelineJMWESingleton {
     }
     public List<IMWE<IToken>> getjMWEInSentence(CoreMap sentence, IMWEIndex index, IMWEDetector detector,
             boolean verbose) {
         List<IToken> tokens = getITokens(sentence.get(CoreAnnotations.TokensAnnotation.class));
         List<IMWE<IToken>> mwes = detector.detect(tokens);
         if (verbose) {
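getJMWEAnnotation now calls the annotate overload that takes an explicit thread count, annotate(values, 4), matching the other call sites in this commit. A Kotlin sketch of the same batch shape; the overload is assumed to be the one the diff itself uses:

    import com.google.common.collect.MapMaker
    import edu.stanford.nlp.pipeline.Annotation
    import edu.stanford.nlp.pipeline.StanfordCoreNLP

    fun annotateBatch(texts: Collection<String>, nlp: StanfordCoreNLP): Map<String, Annotation> {
        val out = MapMaker().concurrencyLevel(5).makeMap<String, Annotation>()
        texts.forEach { out[it] = Annotation(it) } // wrap each text first
        nlp.annotate(out.values, 4) // the numThreads overload used by the diff
        return out
    }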


@@ -37,6 +37,7 @@ import edu.stanford.nlp.trees.TypedDependency;
 import edu.stanford.nlp.trees.tregex.gui.Tdiff;
 import edu.stanford.nlp.util.CoreMap;
 import edu.stanford.nlp.util.Pair;
 import java.io.StringReader;
 import java.util.AbstractMap;
 import java.util.ArrayList;
@@ -49,6 +50,7 @@ import java.util.Set;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.lucene.analysis.core.StopAnalyzer;
 import org.ejml.simple.SimpleMatrix;
@@ -57,11 +59,11 @@ import org.ejml.simple.SimpleMatrix;
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
 /**
- *
 * @author install1
 */
-public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
+public class SentimentAnalyzerTest {
     private final SimilarityMatrix smxParam;
     private final String str;
@@ -80,17 +82,9 @@ public class SentimentAnalyzerTest {
     private SentimentValueCache cacheSentiment1;
     private SentimentValueCache cacheSentiment2;
-    public final SentimentValueCache getCacheSentiment1() {
-        return cacheSentiment1;
-    }
-    public final SentimentValueCache getCacheSentiment2() {
-        return cacheSentiment2;
-    }
     public SentimentAnalyzerTest(String str, String str1, SimilarityMatrix smxParam, Annotation str1Annotation, Annotation str2Annotation,
             Annotation strPipeline1, Annotation strPipeline2, Annotation strPipeSentiment1, Annotation strPipeSentiment2,
             CoreDocument pipelineCoreDcoument1, CoreDocument pipelineCoreDcoument2, SentimentValueCache cacheValue1, SentimentValueCache cacheValue2) {
         this.str = str;
         this.str1 = str1;
         this.smxParam = smxParam;
@@ -112,7 +106,7 @@ public class SentimentAnalyzerTest {
     private List<List<TaggedWord>> getTaggedWordList(String message) {
         List<List<TaggedWord>> taggedwordlist = new ArrayList();
         DocumentPreprocessor tokenizer = new DocumentPreprocessor(new StringReader(message));
-        TokenizerFactory<CoreLabel> ptbTokenizerFactory = PTBTokenizer.factory(new CoreLabelTokenFactory(), "untokenizable=firstDelete"); //noneDelete
+        TokenizerFactory<CoreLabel> ptbTokenizerFactory = PTBTokenizer.factory(new CoreLabelTokenFactory(), "untokenizable=noneDelete"); //noneDelete //firstDelete
         tokenizer.setTokenizerFactory(ptbTokenizerFactory);
         for (final List<HasWord> sentence : tokenizer) {
             taggedwordlist.add(tagger.tagSentence(sentence));
@@ -122,7 +116,6 @@ public class SentimentAnalyzerTest {
     private int tokenizeCounting(List<List<TaggedWord>> taggedwordlist) {
         int counter = 0;
-        Collection<TaggedWord> taggedCollection = new ArrayList();
         for (List<TaggedWord> taggedList : taggedwordlist) {
             counter += taggedList.size();
         }
@@ -130,28 +123,31 @@ public class SentimentAnalyzerTest {
     }
     private ConcurrentMap<Integer, String> retrieveTGWListIndex(List<List<TaggedWord>> taggedwordlist) {
-        ConcurrentMap<Integer, String> tgwlistIndex = new MapMaker().concurrencyLevel(2).makeMap();
-        taggedwordlist.forEach((TGWList) -> {
-            TGWList.forEach((TaggedWord) -> {
-                if (!tgwlistIndex.values().contains(TaggedWord.tag()) && !TaggedWord.tag().equals(":")) {
-                    tgwlistIndex.put(tgwlistIndex.size() + 1, TaggedWord.tag());
-                }
-            });
-        });
+        ConcurrentMap<Integer, String> tgwlistIndex = new MapMaker().concurrencyLevel(5).makeMap();
+        for (List<TaggedWord> tGWList : taggedwordlist) {
+            for (TaggedWord taggedWord : tGWList) {
+                for (String str : tgwlistIndex.values()) {
+                    if (!taggedWord.tag().equals(str) && !taggedWord.tag().equals(":")) {
+                        tgwlistIndex.put(tgwlistIndex.size() + 1, taggedWord.tag());
+                        tGWList.remove(taggedWord);
+                    }
+                }
+            }
+        }
         return tgwlistIndex;
     }
     private Double iterateTrees(ConcurrentMap<Integer, Tree> sentenceConstituencyParseList2, ConcurrentMap<Integer, Tree> sentenceConstituencyParseList1,
             Double score) {
         double preConstituentsScore = score;
-        ConcurrentMap<Integer, Integer> constituentsMap = new MapMaker().concurrencyLevel(4).makeMap();
+        ConcurrentMap<Integer, Integer> constituentsMap = new MapMaker().concurrencyLevel(5).makeMap();
         int constituencySize = sentenceConstituencyParseList1.size() + sentenceConstituencyParseList2.size();
         for (final Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2.values()) {
             int constiRelationsize = 0;
             for (final Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1.values()) {
                 Set<Constituent> constinuent1 = Tdiff.markDiff(sentenceConstituencyParse1, sentenceConstituencyParse2);
                 Set<Constituent> constinuent2 = Tdiff.markDiff(sentenceConstituencyParse2, sentenceConstituencyParse1);
-                ConcurrentMap<Integer, String> constiLabels = new MapMaker().concurrencyLevel(2).makeMap();
+                ConcurrentMap<Integer, String> constiLabels = new MapMaker().concurrencyLevel(5).makeMap();
                 for (final Constituent consti : constinuent1) {
                     for (final Constituent consti1 : constinuent2) {
                         if (consti.value().equals(consti1.value()) && !constiLabels.values().contains(consti.value())) {
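The rewritten retrieveTGWListIndex above iterates tgwlistIndex.values() while putting into the same map and removes from tGWList mid-iteration, and its inner test inverts the old "add when not yet contained" check. A sketch of the original check without mutating what is being iterated; tagIndex is hypothetical and uses plain strings in place of TaggedWord:

    fun tagIndex(tagged: List<List<String>>): Map<Int, String> {
        val index = linkedMapOf<Int, String>()
        for (sentence in tagged) {
            for (tag in sentence) {
                if (tag != ":" && tag !in index.values) { // the old contains() test
                    index[index.size + 1] = tag
                }
            }
        }
        return index
    }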
@@ -223,10 +219,10 @@ public class SentimentAnalyzerTest {
     }
     private Double typeDependenciesGrammaticalRelation(Collection<TypedDependency> allTypedDependencies1, Collection<TypedDependency> allTypedDependencies2,
             Double score, ConcurrentMap<Integer, GrammaticalStructure> grammaticalMap1, ConcurrentMap<Integer, GrammaticalStructure> grammaticalMap2,
             ConcurrentMap<Integer, Tree> sentenceConstituencyParseList1, ConcurrentMap<Integer, Tree> sentenceConstituencyParseList2) {
-        ConcurrentMap<Integer, Integer> alltypeDepsSizeMap = new MapMaker().concurrencyLevel(2).makeMap();
-        ConcurrentMap<Integer, Integer> summationMap = new MapMaker().concurrencyLevel(2).makeMap();
+        ConcurrentMap<Integer, Integer> alltypeDepsSizeMap = new MapMaker().concurrencyLevel(5).makeMap();
+        ConcurrentMap<Integer, Integer> summationMap = new MapMaker().concurrencyLevel(5).makeMap();
         int relationApplicable1 = 0;
         int relationApplicable2 = 0;
         int grammaticalRelation1 = 0;
@@ -332,19 +328,27 @@ public class SentimentAnalyzerTest {
                     : (grammaticalRelation2 - grammaticalRelation1) * 500;
             }
         }
-        ConcurrentMap<Integer, String> filerTreeContent = new MapMaker().concurrencyLevel(2).makeMap();
+        ConcurrentMap<Integer, String> filerTreeContent = new MapMaker().concurrencyLevel(5).makeMap();
         AtomicInteger runCount1 = new AtomicInteger(0);
         for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1.values()) {
             for (Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2.values()) {
-                sentenceConstituencyParse1.taggedLabeledYield().forEach((LBW) -> {
-                    sentenceConstituencyParse2.taggedLabeledYield().stream().filter((LBW1) -> (LBW.lemma().equals(LBW1.lemma())
-                            && !filerTreeContent.values().contains(LBW.lemma()))).map((_item) -> {
-                        filerTreeContent.put(filerTreeContent.size() + 1, LBW.lemma());
-                        return _item;
-                    }).forEachOrdered((_item) -> {
-                        runCount1.getAndIncrement();
-                    });
-                });
+                for (CoreLabel LBW : sentenceConstituencyParse1.taggedLabeledYield()) {
+                    for (CoreLabel LBW1 : sentenceConstituencyParse2.taggedLabeledYield()) {
+                        if (LBW.lemma().equals(LBW1.lemma())) {
+                            boolean found = false;
+                            for (String str : filerTreeContent.values()) {
+                                if (str.equals(LBW.lemma())) {
+                                    found = true;
+                                    break;
+                                }
+                            }
+                            if (!found) {
+                                filerTreeContent.put(filerTreeContent.size() + 1, LBW.lemma());
+                                runCount1.getAndIncrement();
+                            }
+                        }
+                    }
+                }
             }
         }
         score += runCount1.get() * 250;
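The found-flag scan above is a membership test over filerTreeContent's values. A Set keeps the same lemma de-duplication without the inner scan; countSharedLemmas is hypothetical and stubs lemmas as plain strings:

    fun countSharedLemmas(yield1: List<String>, yield2: List<String>): Int {
        val other = yield2.toSet()          // lemmas present in the second tree
        val seen = mutableSetOf<String>()   // lemmas already counted
        var runCount = 0
        for (lemma in yield1) {
            if (lemma in other && seen.add(lemma)) runCount++ // add() is the dedup test
        }
        return runCount
    }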
@@ -384,10 +388,10 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
    int iterationOverHeat = 0;
    double scoreFallback = score;
    for (SimpleMatrix simpleSMX2 : simpleSMXlist2.values()) {
        ConcurrentMap<Integer, Double> AccumulateDotMap = new MapMaker().concurrencyLevel(5).makeMap();
        ConcurrentMap<Integer, Double> subtractorMap = new MapMaker().concurrencyLevel(5).makeMap();
        ConcurrentMap<Integer, Double> dotPredictions = new MapMaker().concurrencyLevel(5).makeMap();
        ConcurrentMap<Integer, Double> DotOverTransfer = new MapMaker().concurrencyLevel(5).makeMap();
        Double totalSubtraction = 0.0;
        Double largest = 10.0;
        Double shortest = 100.0;
@@ -538,10 +542,10 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
}
private Double simpleRNNMaxtrixVectors(Double score, ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector1, ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector2) {
    ConcurrentMap<Integer, Double> elementSumCounter = new MapMaker().concurrencyLevel(5).makeMap();
    ConcurrentMap<Integer, Double> dotMap = new MapMaker().concurrencyLevel(5).makeMap();
    ConcurrentMap<Integer, Double> elementSumMap = new MapMaker().concurrencyLevel(5).makeMap();
    ConcurrentMap<Integer, Double> dotSumMap = new MapMaker().concurrencyLevel(5).makeMap();
    Double preDot = 0.0;
    Double postDot = 0.0;
    int iterateSize = simpleSMXlistVector1.values().size() + simpleSMXlistVector2.values().size();
@@ -718,7 +722,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
}
private final Map.Entry<Double, Map.Entry<SentimentValueCache, SentimentValueCache>> classifyRawEvaluation(Double score, SentimentValueCache cacheSentimentLocal1,
        SentimentValueCache cacheSentimentLocal2) {
    if (cacheSentiment1 == null || cacheSentiment2 == null) {
        DocumentReaderAndWriter<CoreLabel> readerAndWriter = classifier.makePlainTextReaderAndWriter();
        if (cacheSentiment1 == null) {
@@ -731,7 +735,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
    final List classifyRaw1 = cacheSentiment1 == null ? cacheSentimentLocal1.getClassifyRaw() : cacheSentiment1.getClassifyRaw();
    final List classifyRaw2 = cacheSentiment2 == null ? cacheSentimentLocal2.getClassifyRaw() : cacheSentiment2.getClassifyRaw();
    score -= (classifyRaw1.size() > classifyRaw2.size() ? classifyRaw1.size() - classifyRaw2.size() : classifyRaw2.size() - classifyRaw1.size()) * 200;
    Map.Entry<Double, Map.Entry<SentimentValueCache, SentimentValueCache>> entry
            = new AbstractMap.SimpleEntry(score, new AbstractMap.SimpleEntry(cacheSentimentLocal1, cacheSentimentLocal2));
    return entry;
}
@@ -845,7 +849,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
}
private Double entryCountsScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) {
    ConcurrentMap<Integer, Integer> countsMap = new MapMaker().concurrencyLevel(5).makeMap();
    int totalsize = cacheSentimentLocal1.getEntryCounts().values().size() + cacheSentimentLocal2.getEntryCounts().values().size();
    for (int counts : cacheSentimentLocal1.getEntryCounts().values()) {
        for (int counts1 : cacheSentimentLocal2.getEntryCounts().values()) {
@@ -930,7 +934,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
}
private Double strTokensMapScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) {
    ConcurrentMap<Integer, String> strtokensMap = new MapMaker().concurrencyLevel(5).makeMap();
    for (String strTokeniPart1 : cacheSentimentLocal1.getstrTokenGetiPart().values()) {
        for (String strTokeniPart2 : cacheSentimentLocal2.getstrTokenGetiPart().values()) {
            if (strTokeniPart1.equals(strTokeniPart2) && !strtokensMap.values().contains(strTokeniPart2)) {
@@ -959,7 +963,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
    if (tokenEntry1 * 2 != tokenEntry2 && tokenEntry2 * 2 != tokenEntry1) {
        boundaryLeaks = true;
    }
    ConcurrentMap<Integer, String> entryTokenMap = new MapMaker().concurrencyLevel(5).makeMap();
    for (String strTokenEntry1 : cacheSentimentLocal1.getstrTokenGetEntry().values()) {
        for (String strTokenEntry2 : cacheSentimentLocal2.getstrTokenGetEntry().values()) {
            if (!entryTokenMap.values().contains(strTokenEntry2)) {
@@ -979,7 +983,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
}
private Double strTokenMapTagsScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) {
    ConcurrentMap<Integer, String> iTokenMapTagsMap = new MapMaker().concurrencyLevel(5).makeMap();
    for (String strmapTag : cacheSentimentLocal1.getITokenMapTag().values()) {
        for (String strmapTag1 : cacheSentimentLocal2.getITokenMapTag().values()) {
            if (strmapTag.equals(strmapTag1) && !iTokenMapTagsMap.values().contains(strmapTag1)) {
@@ -1028,7 +1032,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
}
private Double tokenStemmingMapScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) {
    ConcurrentMap<Integer, String> tokenStemmingMap = new MapMaker().concurrencyLevel(5).makeMap();
    for (String strTokenStem : cacheSentimentLocal1.getstrTokenStems().values()) {
        for (String strTokenStem1 : cacheSentimentLocal2.getstrTokenStems().values()) {
            if (strTokenStem.equals(strTokenStem1) && !tokenStemmingMap.values().contains(strTokenStem)) {
@@ -1233,18 +1237,25 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
private Double tgwListScoreIncrementer(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) {
    AtomicInteger runCount = new AtomicInteger(0);
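    // For every tagged word in cache 1 that cache 2 does not yet contain,
    // add it to cache 2's index and count it toward the score bonus below.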
    for (String taggedWord : cacheSentimentLocal1.getTgwlistIndex().values()) {
        boolean found = false;
        for (String taggedWord1 : cacheSentimentLocal2.getTgwlistIndex().values()) {
            if (taggedWord.equals(taggedWord1)) {
                found = true;
                break;
            }
        }
        if (!found) {
            cacheSentimentLocal2.addTgwlistIndex(taggedWord);
            runCount.getAndIncrement();
        }
    }
    score += runCount.get() * 64;
    return score;
}
public final SimilarityMatrix callSMX() {
    Double score = -100.0;
    SentimentValueCache cacheSentimentLocal1 = null;
    SentimentValueCache cacheSentimentLocal2 = null;
@@ -1258,7 +1269,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
            cacheSentimentLocal2 = initializeCacheSetup(str1, cacheSentimentLocal2);
        }
    } catch (Exception ex) {
        System.out.println("ex 1: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
                + ex.getCause());
    }
    try {
        counter1 = cacheSentiment1 == null ? cacheSentimentLocal1.getCounter() : cacheSentiment1.getCounter();
@@ -1267,7 +1279,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
        final int overValue = (counter1 >= counter2 ? counter1 - counter2 : counter2 - counter1) * 32;
        score -= overValue;
    } catch (Exception ex) {
        System.out.println("ex 2: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
                + ex.getCause());
    }
    try {
        if (cacheSentiment1 == null) {
@@ -1283,7 +1296,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
            }
        }
    } catch (Exception ex) {
        System.out.println("ex 3: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
                + ex.getCause());
    }
    try {
        score = tgwListScoreIncrementer(score, cacheSentiment1 == null
@@ -1295,7 +1309,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
            cacheSentimentLocal2 = GrammaticStructureSetup(cacheSentimentLocal2, pipelineAnnotation2);
        }
    } catch (Exception ex) {
        System.out.println("ex 4: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
                + ex.getCause());
    }
    ConcurrentMap<Integer, Tree> sentenceConstituencyParseList1 = null;
    ConcurrentMap<Integer, Tree> sentenceConstituencyParseList2 = null;
@@ -1306,7 +1321,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
                ? cacheSentimentLocal1.getSentenceConstituencyParseList() : cacheSentiment1.getSentenceConstituencyParseList();
        score = iterateTrees(sentenceConstituencyParseList2, sentenceConstituencyParseList1, score);
    } catch (Exception ex) {
        System.out.println("ex 5: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
                + ex.getCause());
    }
    try {
        Collection<TypedDependency> allTypedDependencies2 = cacheSentiment2 == null ? cacheSentimentLocal2.getAllTypedDependencies()
@@ -1319,7 +1335,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
        score = typeDependenciesGrammaticalRelation(allTypedDependencies1, allTypedDependencies2, score, grammaticalMap1, grammaticalMap2,
                sentenceConstituencyParseList1, sentenceConstituencyParseList2);
    } catch (Exception ex) {
        System.out.println("ex 6: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
                + ex.getCause());
    }
    try {
        if (cacheSentiment1 == null) {
@@ -1329,7 +1346,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
            cacheSentimentLocal2 = sentimentCoreAnnotationSetup(pipelineAnnotation2Sentiment, cacheSentimentLocal2);
        }
    } catch (Exception ex) {
        System.out.println("ex 7: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
                + ex.getCause());
    }
    try {
        final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist1 = cacheSentiment1 == null
@@ -1343,7 +1361,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
        score = simpleRNNMatrixCalculations(score, simpleSMXlist1, simpleSMXlist2);
        score = simpleRNNMaxtrixVectors(score, simpleSMXlistVector1, simpleSMXlistVector2);
    } catch (Exception ex) {
        System.out.println("ex 8: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
                + ex.getCause());
    }
    try {
        int sentiment1 = cacheSentiment1 == null ? cacheSentimentLocal1.getRnnPrediectClassMap().size() : cacheSentiment1.getRnnPrediectClassMap().size();
@@ -1359,7 +1378,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
            cacheSentimentLocal2 = classifyRawEvaluationEntry.getValue().getValue();
        }
    } catch (Exception ex) {
        System.out.println("ex 9: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
                + ex.getCause());
    }
    try {
        if (cacheSentiment1 == null) {
@@ -1370,97 +1390,52 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
        }
        score = sentimentMatrixVariances(score, cacheSentiment1 == null ? cacheSentimentLocal1.getLongest() : cacheSentiment1.getLongest(),
                cacheSentiment2 == null ? cacheSentimentLocal2.getLongest() : cacheSentiment2.getLongest(), cacheSentiment1 == null
                ? cacheSentimentLocal1.getMainSentiment() : cacheSentiment1.getMainSentiment(), cacheSentiment2 == null
                ? cacheSentimentLocal2.getMainSentiment() : cacheSentiment2.getMainSentiment());
    } catch (Exception ex) {
        System.out.println("ex 10: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
                + ex.getCause());
    }
    if (cacheSentiment1 == null) {
        cacheSentimentLocal1 = jmweAnnotationSetup(jmweStrAnnotation1, cacheSentimentLocal1);
    }
    if (cacheSentiment2 == null) {
        cacheSentimentLocal2 = jmweAnnotationSetup(jmweStrAnnotation2, cacheSentimentLocal2);
    }
    SentimentValueCache scoringCache1 = cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1;
    SentimentValueCache scoringCache2 = cacheSentiment2 == null ? cacheSentimentLocal2 : cacheSentiment2;
    score = entryCountsRelation(score, scoringCache1, scoringCache2);
    score = entryCountsScoring(score, scoringCache1, scoringCache2);
    score = tokenEntryPosScoring(score, scoringCache1, scoringCache2);
    score = unmarkedPatternCounterScoring(score, scoringCache1, scoringCache2);
    score = markedContiniousCounterScoring(score, scoringCache1, scoringCache2);
    score = strTokensMapScoring(score, scoringCache1, scoringCache2);
    score = strTokenEntryScoring(score, scoringCache1, scoringCache2);
    score = strTokenMapTagsScoring(score, scoringCache1, scoringCache2);
    score = tokenformSizeScoring(score, scoringCache1, scoringCache2);
    score = tokenStemmingMapScoring(score, scoringCache1, scoringCache2);
    score = inflectedCounterScoring(score, scoringCache1, scoringCache2);
    score = annotatorCountScoring(score, scoringCache1, scoringCache2);
    score = tokensCounterScoring(score, scoringCache1, scoringCache2);
    LevenshteinDistance leven = new LevenshteinDistance(str, str1);
    double SentenceScoreDiff = leven.computeLevenshteinDistance();
    SentenceScoreDiff *= 15;
    score -= SentenceScoreDiff;
    try {
        if (cacheSentiment1 == null) {
            cacheSentimentLocal1 = setupNEREntitiesAndTokenTags(pipelineCoreDcoument1, cacheSentimentLocal1);
@@ -1471,7 +1446,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
        score = nerEntitiesAndTokenScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
                ? cacheSentimentLocal2 : cacheSentiment2);
    } catch (Exception ex) {
        System.out.println("ex 11: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
                + ex.getCause());
    }
    try {
        if (cacheSentiment1 == null) {
@@ -1483,24 +1459,17 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
        score = stopWordTokenLemmaScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
                ? cacheSentimentLocal2 : cacheSentiment2);
    } catch (Exception ex) {
        System.out.println("ex 12: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
                + ex.getCause());
    }
    score = stopwordTokenPairCounterScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
            ? cacheSentimentLocal2 : cacheSentiment2);
    smxParam.setDistance(score);
    if (cacheSentiment1 == null) {
        smxParam.setCacheValue1(cacheSentimentLocal1);
    }
    if (cacheSentiment2 == null) {
        smxParam.setCacheValue2(cacheSentimentLocal2);
    }
    return smxParam;
}
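// A hypothetical usage sketch (the constructor arguments and a getDistance()
// getter are assumed here, not shown in this diff): the worker is built per
// sentence pair, callSMX() runs the scoring cascade above, and the tuned
// distance is read back from the returned SimilarityMatrix.
//   SimilarityMatrix smx = sentimentAnalyzerTest.callSMX();
//   double distance = smx.getDistance();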
View File
@@ -14,28 +14,85 @@ screen -X -S (number1) quit
*/
package PresentationLayer;
import DataLayer.settings;
import FunctionLayer.CustomError;
import FunctionLayer.Datahandler;
import FunctionLayer.DoStuff;
import FunctionLayer.PipelineJMWESingleton;
import com.sun.tools.javac.util.List;
import discord4j.core.DiscordClient;
import discord4j.core.GatewayDiscordClient;
import discord4j.core.event.domain.message.MessageCreateEvent;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.*;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * @author install1
 */
public class DiscordHandler {
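    // Blocks on one incoming UDP datagram, strips the "clientmessage:" prefix,
    // fetches a reply from Datahandler and sends it back on a fixed reply port.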
    private static void receiveAndSendPacket(DatagramSocket serverSocket, InetAddress ipAddress, int port) throws IOException, CustomError {
        byte[] receiveData = new byte[4096];
        DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
        try {
            serverSocket.receive(receivePacket);
        } catch (IOException e) {
            e.printStackTrace();
        }
        String sentence = new String(receivePacket.getData(), 0,
                receivePacket.getLength());
        sentence = sentence.replace("clientmessage:", "");
        String getResponseMsg = Datahandler.instance.getResponseMsg(sentence);
        byte[] sendData = getResponseMsg.getBytes("UTF-8");
        int deliver_port = 0;
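        // Map each listening port to a fixed, distinct reply port.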
        switch (port) {
            case 48470:
                deliver_port = 48479;
                break;
            case 48471:
                deliver_port = 48482;
                break;
            case 48472:
                deliver_port = 48476;
                break;
            case 48473:
                deliver_port = 48481;
                break;
        }
        DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, ipAddress, deliver_port);
        serverSocket.send(sendPacket);
        //System.out.println("receiveAndSendPacket send message to port: " + deliver_port);
    }
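    // Owns one UDP socket for the given port and services it forever; the
    // remote host is selected by port number (two ports per host).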
    public static void handleUDPTraffic(int port) {
        try (DatagramSocket serverSocket = new DatagramSocket(port)) {
            String hostIP = "";
            if (port == 48473 || port == 48471) {
                hostIP = "51.15.159.31";
            } else {
                hostIP = "195.154.53.196";
            }
            InetAddress ipAddress = InetAddress.getByName(hostIP); // used IP
            while (true) {
                receiveAndSendPacket(serverSocket, ipAddress, port);
            }
        } catch (SocketException | UnknownHostException e) {
            e.printStackTrace();
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        } catch (CustomError customError) {
            customError.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    public static void main(String[] args) {
        try {
            Datahandler.instance.initiateMYSQL();
@@ -56,9 +113,15 @@ public class DiscordHandler {
        final DiscordClient client = DiscordClient.create(token);
        final GatewayDiscordClient gateway = client.login().block();
        String usernameBot = gateway.getSelf().block().getUsername();
        int autismbotCount = 4;
        //make sure not to use ports that are already occupied.
        for (int i = 0; i < autismbotCount; i++) {
            final int j = i;
            new Thread(() -> {
                List<Integer> ports = List.of(48470, 48471, 48472, 48473);
                handleUDPTraffic(ports.get(j));
            }).start();
        }
        gateway.on(MessageCreateEvent.class).subscribe(event -> {
            if (!FunctionLayer.DoStuff.isOccupied()) {
                FunctionLayer.DoStuff.doStuff(event, usernameBot);