Some current attempts with coroutines, but more has to be changed again I suppose.

parent 14e70db53c
commit 3fd5693986
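The recurring change below wraps the old Thread-and-CountDownLatch initialisation in runBlocking / coroutineScope / launch. For orientation, a minimal self-contained Kotlin sketch of that target shape; loadParser and loadClassifier are hypothetical stand-ins for the Stanford CoreNLP loading calls, not functions from this repository:

    import kotlinx.coroutines.Dispatchers
    import kotlinx.coroutines.async
    import kotlinx.coroutines.coroutineScope
    import kotlinx.coroutines.runBlocking

    // Hypothetical stand-ins for the heavyweight model loading done in the diff.
    fun loadParser(): String = "parser"
    fun loadClassifier(): String = "classifier"

    // coroutineScope only returns once every child has finished, so the scope
    // itself is the synchronization point; no CountDownLatch is needed.
    fun initiateModels(): Unit = runBlocking {
        coroutineScope {
            val parser = async(Dispatchers.Default) { loadParser() }
            val classifier = async(Dispatchers.Default) { loadClassifier() }
            println("loaded ${parser.await()} and ${classifier.await()}")
        }
    }

Note that launch immediately followed by job.join(), as in several hunks below, waits for that one job before the loop continues, so the launched work stays effectively sequential.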
@@ -23,56 +23,61 @@ import edu.stanford.nlp.trees.TreebankLanguagePack
import kotlinx.coroutines.*
import java.io.IOException
import java.io.UnsupportedEncodingException
import java.lang.Runnable
import java.net.DatagramPacket
import java.net.DatagramSocket
import java.net.InetAddress
import java.net.SocketException
import java.net.*
import java.sql.SQLException
import java.util.*
import java.util.concurrent.*
import java.util.concurrent.ConcurrentMap
import java.util.concurrent.CountDownLatch
import java.util.concurrent.TimeUnit
import java.util.function.Consumer
import java.util.logging.Level
import java.util.logging.Logger
import kotlin.collections.ArrayList

/**
 *
 * @author install1
 */
class Datahandler {
private val lHMSMX: LinkedHashMap<Any?, Any?> = LinkedHashMap<Any?, Any?>()
public class Datahandler {
private val stopwatch: Stopwatch
fun shiftReduceParserInitiate() {
fun shiftReduceParserInitiate() = runBlocking {
val cdl = CountDownLatch(2)
Thread(Runnable {
try {
classifier = CRFClassifier.getClassifierNoExceptions(nerModel)
} catch (ex: ClassCastException) {
Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
coroutineScope {
val job = launch(Dispatchers.Default) {
propsSentiment.setProperty("parse.model", lexParserEnglishRNN)
propsSentiment.setProperty("sentiment.model", sentimentModel)
propsSentiment.setProperty("parse.maxlen", "90")
propsSentiment.setProperty("threads", "5")
propsSentiment.setProperty("pos.maxlen", "90")
propsSentiment.setProperty("tokenize.maxlen", "90")
propsSentiment.setProperty("ssplit.maxlen", "90")
propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,sentiment,lemma,stopword") //coref too expensive memorywise
propsSentiment.setProperty("customAnnotatorClass.stopword", "FunctionLayer.StopwordAnnotator")
propsSentiment.setProperty(StopwordAnnotator.STOPWORDS_LIST, customStopWordList)
propsSentiment.setProperty("tokenize.options", "untokenizable=firstKeep")
pipelineSentiment = StanfordCoreNLP(propsSentiment)
tagger = MaxentTagger(taggerPath)
lp = LexicalizedParser.loadModel(lexParserEnglishRNN, *options)
tlp = lp.getOp().langpack()
gsf = tlp.grammaticalStructureFactory()
cdl.countDown()
yield()
}
cdl.countDown()
}).start()
Thread(Runnable {
propsSentiment.setProperty("parse.model", lexParserEnglishRNN)
propsSentiment.setProperty("sentiment.model", sentimentModel)
propsSentiment.setProperty("parse.maxlen", "90")
propsSentiment.setProperty("threads", "5")
propsSentiment.setProperty("pos.maxlen", "90")
propsSentiment.setProperty("tokenize.maxlen", "90")
propsSentiment.setProperty("ssplit.maxlen", "90")
propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,sentiment,lemma,stopword") //coref too expensive memorywise
propsSentiment.setProperty("customAnnotatorClass.stopword", "FunctionLayer.StopwordAnnotator")
propsSentiment.setProperty(StopwordAnnotator.STOPWORDS_LIST, customStopWordList)
propsSentiment.setProperty("tokenize.options", "untokenizable=firstKeep")
pipelineSentiment = StanfordCoreNLP(propsSentiment)
tagger = MaxentTagger(taggerPath)
cdl.countDown()
}).start()
lp = LexicalizedParser.loadModel(lexParserEnglishRNN, *options)

tlp = lp.getOp().langpack()
gsf = tlp.grammaticalStructureFactory()
job.join()
}
coroutineScope {
val job = launch(Dispatchers.Default) {
try {
classifier = CRFClassifier.getClassifierNoExceptions(nerModel)
} catch (ex: ClassCastException) {
Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
}
cdl.countDown()
yield()
}
job.join()
}
try {
cdl.await()
} catch (ex: InterruptedException) {
@@ -98,7 +103,6 @@ class Datahandler {
try {
DataMapper.createTables()
stringCache.putAll(cache)
// lHMSMX = DataMapper.getAllRelationScores();
} catch (ex: CustomError) {
Logger.getLogger(Datahandler::class.java
.name).log(Level.SEVERE, null, ex)
@@ -114,70 +118,62 @@ class Datahandler {
}
}

fun instantiateAnnotationMap() {
fun instantiateAnnotationMap() = runBlocking {
if (!stringCache.isEmpty()) {
val Annotationspipeline = MapMaker().concurrencyLevel(4).makeMap<String?, Annotation>()
val AnnotationspipelineSentiment = MapMaker().concurrencyLevel(4).makeMap<String?, Annotation>()
runBlocking {
val job = launch(Dispatchers.Default) {
for (str in stringCache.values) {
val Annotationspipeline = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
val AnnotationspipelineSentiment = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
coroutineScope {
for (str in stringCache.values) {
val job = launch(Dispatchers.Default) {
val strAnno = Annotation(str)
strAnno.compact()
Annotationspipeline[str] = strAnno
val strAnno2 = Annotation(str)
strAnno2.compact()
AnnotationspipelineSentiment[str] = strAnno2
yield()
}
yield()
job.join();
}
job.join();
}
System.out.println("PRE getMultipleCoreDocumentsWaySuggestion lag")
val coreDocumentpipelineMap = getMultipleCoreDocumentsWaySuggestion(stringCache.values, pipeline)
pipeline.annotate(Annotationspipeline.values)
pipelineSentiment!!.annotate(AnnotationspipelineSentiment.values)
runBlocking {
val job = launch(Dispatchers.Default) {
for (i in Annotationspipeline.entries) {
//System.out.println("post getMultipleCoreDocumentsWaySuggestion instantiateAnnotationMap lag")
pipeline.annotate(Annotationspipeline.values, 4)
pipelineSentiment!!.annotate(AnnotationspipelineSentiment.values, 4)
//System.out.println("reached second job instantiateAnnotationMap lag");
coroutineScope {
for (i in Annotationspipeline.entries) {
val job = launch(Dispatchers.Default) {
i.value.compact()
pipelineAnnotationCache[i.key] = i.value
yield()
}
yield()
job.join();
}
job.join()
}
runBlocking {
val job = launch(Dispatchers.Default) {
for (i in AnnotationspipelineSentiment.entries) {

for (i in AnnotationspipelineSentiment.entries) {
val job = launch(Dispatchers.Default) {
i.value.compact()
pipelineSentimentAnnotationCache[i.key] = i.value
yield()
}
yield()
job.join();
}
job.join()
}
runBlocking {
val job = launch(Dispatchers.Default) {
for (i in coreDocumentpipelineMap.entries) {
coreDocumentAnnotationCache[i.key] = i.value
}
yield()
}
job.join()
System.out.println("post Annotationspipeline lag")
for (i in coreDocumentpipelineMap.entries) {
coreDocumentAnnotationCache[i.key] = i.value
}

}
}

private fun futuresReturnOverallEvaluation(similarityMatrixes: List<SimilarityMatrix?>): ConcurrentMap<Int?, String?> {
var strmapreturn = MapMaker().concurrencyLevel(6).makeMap<Int?, String?>()
if (!similarityMatrixes.isEmpty()) {
var iterator = 0
for (SMX in similarityMatrixes) {
val scoreRelationNewMsgToRecentMsg = SMX!!.distance
if (scoreRelationNewMsgToRecentMsg > 0.0) {
strmapreturn = addSMXToMapReturn(strmapreturn, SMX)
}
//System.out.println("scoreRelationNewMsgToRecentMsg: " + scoreRelationNewMsgToRecentMsg + "\niterator: " + iterator);
iterator++
strmapreturn = addSMXToMapReturn(strmapreturn, SMX)
}
}
return strmapreturn
@@ -199,25 +195,33 @@ class Datahandler {
return strmapreturn
}

private fun checkForNullValues(index: String?): Boolean {
if (jmweAnnotationCache[index] != null && pipelineAnnotationCache[index] != null
&& pipelineSentimentAnnotationCache[index] != null &&
coreDocumentAnnotationCache[index] != null) {
return true;
}
return false;
}

private fun StrComparringNoSentenceRelationMap(strCacheLocal: ConcurrentMap<Int, String?>, strCollection: Collection<String?>, localJMWEMap: ConcurrentMap<String, Annotation>,
localPipelineAnnotation: ConcurrentMap<String?, Annotation>, localPipelineSentimentAnnotation: ConcurrentMap<String?, Annotation>,
localCoreDocumentMap: ConcurrentMap<String, CoreDocument>): List<SimilarityMatrix?> {

//TODO here
val distance_requirement = 15500.0
private suspend fun StrComparringNoSentenceRelationMap(strCacheLocal: ConcurrentMap<Int, String?>, strCollection: Collection<String?>, localJMWEMap: ConcurrentMap<String, Annotation>,
localPipelineAnnotation: ConcurrentMap<String?, Annotation>, localPipelineSentimentAnnotation: ConcurrentMap<String?, Annotation>,
localCoreDocumentMap: ConcurrentMap<String, CoreDocument>): List<SimilarityMatrix?> {
val distance_requirement = 10500.0
val prefix_size = 150
val smxReturnList: ArrayList<SimilarityMatrix> = ArrayList<SimilarityMatrix>()
runBlocking {
val job = launch(Dispatchers.Default) {
for (j in strCollection) {

coroutineScope {
for (j in strCollection) {
val job = launch(Dispatchers.Default) {
for (i in strCollection) {
if (j != i) {
val SMXInit = SimilarityMatrix(j, i)
val sentimentCacheStr1 = sentimentCachingMap.getOrDefault(i, null)
val sentimentCacheStr = sentimentCachingMap.getOrDefault(j, null)
var sentimentAnalyzerTest: SentimentAnalyzerTest? = null
if (stringCache.size < prefix_size) {
val checkedVal: Boolean = checkForNullValues(i)
if (stringCache.size < prefix_size || !checkedVal) {
sentimentAnalyzerTest = SentimentAnalyzerTest(j, i, SMXInit,
localJMWEMap[j], localJMWEMap[i], localPipelineAnnotation[j],
localPipelineAnnotation[i], localPipelineSentimentAnnotation[j],
@@ -230,67 +234,71 @@ class Datahandler {
pipelineSentimentAnnotationCache[i], localCoreDocumentMap[j],
coreDocumentAnnotationCache[i], sentimentCacheStr, sentimentCacheStr1)
}
val call = sentimentAnalyzerTest.call();
val call = sentimentAnalyzerTest.callSMX();
if (call != null && call.distance > distance_requirement) {
smxReturnList.add(call)
}
}
}
yield()
}
yield()
job.join()
}
job.join()
}

return smxReturnList
}

private fun stringIteratorComparator(strmap: ConcurrentMap<Int?, String?>,
strCacheLocal: ConcurrentMap<Int, String?>, localJMWEMap: ConcurrentMap<String, Annotation>,
localPipelineAnnotation: ConcurrentMap<String?, Annotation>, localPipelineSentimentAnnotation: ConcurrentMap<String?, Annotation>,
localCoreDocumentMap: ConcurrentMap<String, CoreDocument>): ConcurrentMap<Int?, String?> {
private suspend fun stringIteratorComparator(strmap: ConcurrentMap<Int?, String?>,
strCacheLocal: ConcurrentMap<Int, String?>, localJMWEMap: ConcurrentMap<String, Annotation>,
localPipelineAnnotation: ConcurrentMap<String?, Annotation>, localPipelineSentimentAnnotation: ConcurrentMap<String?, Annotation>,
localCoreDocumentMap: ConcurrentMap<String, CoreDocument>): ConcurrentMap<Int?, String?> {
//System.out.println("strmap siuze: " + strmap.size());
val ComparringNoSentenceRelationMap: List<SimilarityMatrix> = StrComparringNoSentenceRelationMap(strCacheLocal, strmap.values,
localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap) as List<SimilarityMatrix>
Collections.sort(ComparringNoSentenceRelationMap, Comparator<SimilarityMatrix> { e1: SimilarityMatrix, e2: SimilarityMatrix -> e1.primaryString.compareTo(e2.primaryString) })
//System.out.println("strmapreturn size: " + strmapreturn.size());
System.out.println("ComparringNoSentenceRelationMap size: " + ComparringNoSentenceRelationMap.size);
return futuresReturnOverallEvaluation(ComparringNoSentenceRelationMap)
}

private fun removeNonSensicalStrings(strmap: ConcurrentMap<Int?, String?>): ConcurrentMap<Int?, String?> {
private suspend fun removeNonSensicalStrings(strmap: ConcurrentMap<Int?, String?>): ConcurrentMap<Int?, String?> {
val strCacheLocal = stringCache
val localJMWEMap = getMultipleJMWEAnnotation(strmap.values)
val localPipelineAnnotation = getMultiplePipelineAnnotation(strmap.values)
System.out.println("str size post getMultiplePipelineAnnotation: " + strmap.size)
val localPipelineSentimentAnnotation = getMultiplePipelineSentimentAnnotation(strmap.values)
val localCoreDocumentMap = getMultipleCoreDocumentsWaySuggestion(strmap.values, pipeline)
System.out.println("strmap size pre stringIteratorComparator: " + strmap.size)
return stringIteratorComparator(strmap, strCacheLocal, localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap)
}

@Synchronized
@Throws(CustomError::class)
fun checkIfUpdateStrings() {
fun checkIfUpdateStrings() = runBlocking {
if (stopwatch.elapsed(TimeUnit.SECONDS) >= EXPIRE_TIME_IN_SECONDS || !stopwatch.isRunning) {
var str = MessageResponseHandler.getStr()
println("str size: " + str.size)
str = filterContent(str)
System.out.println("str size post filtercontent: " + str.size)
str = removeNonSensicalStrings(str)
//System.out.println("removeNonSensicalStrings str size POST: " + str.size() + "\n");
System.out.println("removeNonSensicalStrings str size POST: " + str.size + "\n");
str = annotationCacheUpdate(str)
println("""
annotationCacheUpdate str size POST: ${str.size}

""".trimIndent())
val strf = str
if (!stringCache.isEmpty()) {
Thread(Runnable {
try {
DataMapper.InsertMYSQLStrings(strf)
} catch (ex: CustomError) {
Logger.getLogger(Datahandler::class.java
.name).log(Level.SEVERE, null, ex)
coroutineScope {
val job = launch(Dispatchers.IO) {
try {
DataMapper.InsertMYSQLStrings(strf)
} catch (ex: CustomError) {
Logger.getLogger(Datahandler::class.java
.name).log(Level.SEVERE, null, ex)
}
MessageResponseHandler.setStr(MapMaker().concurrencyLevel(6).makeMap())
yield()
}
MessageResponseHandler.setStr(MapMaker().concurrencyLevel(6).makeMap())
}).start()
job.join()
}
} else {
try {
DataMapper.InsertMYSQLStrings(strf)
@@ -317,63 +325,59 @@ class Datahandler {
return str
}

private fun getResponseFutures(strF: String): String {
private suspend fun getResponseFutures(strF: String): String {
val values_copy: List<String?> = ArrayList(stringCache.values)
Collections.shuffle(values_copy)
Collections.sort<String>(values_copy) { o1, o2 -> o2.length - o1.length }
var preRelationUserCounters = -155000.0
val concurrentRelations: MutableList<String?> = arrayListOf()
runBlocking {
val job = launch(Dispatchers.Default) {
for (str1 in values_copy) {
if (strF != str1) {
val sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null)
val worker: Callable<SimilarityMatrix> = SentimentAnalyzerTest(strF, str1, SimilarityMatrix(strF, str1),
val SB = StringBuilder()
coroutineScope {
for (str1 in values_copy) {
if (strF != str1) {
val job = launch(Dispatchers.Default) {
var sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null)
var sentimentAnalyzerTest = SentimentAnalyzerTest(strF, str1, SimilarityMatrix(strF, str1),
strAnnoJMWE, jmweAnnotationCache[str1], strAnno,
pipelineAnnotationCache[str1], strAnnoSentiment,
pipelineSentimentAnnotationCache[str1], coreDoc, coreDocumentAnnotationCache[str1],
null, sentimentCacheStr1)
try {
val getSMX = worker.call()
if (getSMX != null) {
val scoreRelationLastUserMsg = getSMX.distance
if (scoreRelationLastUserMsg > preRelationUserCounters) {
preRelationUserCounters = scoreRelationLastUserMsg
concurrentRelations.add(getSMX.secondaryString)
}

var getSMX: SimilarityMatrix = sentimentAnalyzerTest.callSMX()
if (getSMX != null) {
val scoreRelationLastUserMsg = getSMX.distance
if (scoreRelationLastUserMsg > preRelationUserCounters) {
preRelationUserCounters = scoreRelationLastUserMsg
concurrentRelations.add(getSMX.secondaryString)
}
} catch (ex: Exception) {
Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
}
yield()
}
job.join()
}
}
val randomLenghtPermit = strF.length * (Math.random() * Math.random() * Math.random() * (Math.random() * 10))
Collections.reverse(concurrentRelations)
if (!concurrentRelations.isEmpty()) {
val firstRelation = concurrentRelations[0]

val job1 = launch(Dispatchers.Default) {
for (secondaryRelation in concurrentRelations) {
if (SB.toString().length > randomLenghtPermit && !SB.toString().isEmpty()) {
break
}
val append = appendToString(firstRelation, secondaryRelation)
if (append) {
SB.append(secondaryRelation).append(" ")
}
}
yield()
}
yield()
job1.join()
}
job.join()
}

val SB = StringBuilder()
val randomLenghtPermit = strF.length * (Math.random() * Math.random() * Math.random() * 5)
Collections.reverse(concurrentRelations)
if (concurrentRelations.isEmpty()) {
if (SB.toString().isEmpty()) {
return "failure, preventing stuckness"
}

val firstRelation = concurrentRelations[0]
runBlocking {
val job = launch(Dispatchers.Default) {
for (secondaryRelation in concurrentRelations) {
if (SB.toString().length > randomLenghtPermit && !SB.toString().isEmpty()) {
break
}
val append = appendToString(firstRelation, secondaryRelation)
if (append) {
SB.append(secondaryRelation).append(" ")
}
}
yield()
}
job.join()
}
return SB.toString()
}

@@ -387,57 +391,73 @@ class Datahandler {
} else false
}

@Throws(CustomError::class)
fun getResponseMsg(str: String): String {
val strF = trimString(str)
getSingularAnnotation(strF)
return getResponseFutures(strF)
val responseFutures: String
runBlocking {
val strF = trimString(str)
getSingularAnnotation(strF)
responseFutures = getResponseFutures(strF)
}
return responseFutures
}

fun getSingularAnnotation(str: String?) {
strAnno = Annotation(str)
strAnno!!.compact()
pipeline.annotate(strAnno)
strAnnoSentiment = Annotation(str)
strAnnoSentiment!!.compact()
pipelineSentiment!!.annotate(strAnnoSentiment)
val notactualList: MutableList<String?> = arrayListOf()
notactualList.add(str)
val jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(notactualList)
strAnnoJMWE = jmweAnnotation.values.iterator().next()
strAnnoJMWE.compact()
val coreDocument = CoreDocument(str)
pipeline.annotate(coreDocument)
coreDoc = coreDocument
suspend fun getSingularAnnotation(str: String?) {
coroutineScope {
val job = launch(Dispatchers.Default) {
strAnno = Annotation(str)
strAnno!!.compact()
pipeline.annotate(strAnno)
yield()
}
job.join()

val job1 = launch(Dispatchers.Default) {
strAnnoSentiment = Annotation(str)
strAnnoSentiment!!.compact()
pipelineSentiment!!.annotate(strAnnoSentiment)
val notactualList: MutableList<String?> = arrayListOf()
notactualList.add(str)
val jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(notactualList)
strAnnoJMWE = jmweAnnotation.values.iterator().next()
strAnnoJMWE.compact()
yield()
}
job1.join()

val job3 = launch(Dispatchers.Default) {
val coreDocument = CoreDocument(str)
pipeline.annotate(coreDocument)
coreDoc = coreDocument
yield()
}
job3.join()
}
}

private fun getScoreRelationStrF(str: String?, mostRecentMsg: String?): Double {
val SMX = SimilarityMatrix(str, mostRecentMsg)
val cacheSentiment1 = sentimentCachingMap.getOrDefault(str, null)
val cacheSentiment2 = sentimentCachingMap.getOrDefault(mostRecentMsg, null)
val worker: Callable<SimilarityMatrix> = SentimentAnalyzerTest(str, mostRecentMsg, SMX,
val sentimentAnalyzerTest = SentimentAnalyzerTest(str, mostRecentMsg, SMX,
strAnnoJMWE, jmweAnnotationCache[mostRecentMsg], strAnno,
pipelineAnnotationCache[mostRecentMsg], strAnnoSentiment,
pipelineSentimentAnnotationCache[mostRecentMsg], coreDoc, coreDocumentAnnotationCache[mostRecentMsg], cacheSentiment1, cacheSentiment2)
var callSMX: SimilarityMatrix? = null
try {
callSMX = worker.call()
} catch (ex: Exception) {
Logger.getLogger(Datahandler::class.java
.name).log(Level.SEVERE, null, ex)
}
return callSMX?.distance ?: 0.0
pipelineSentimentAnnotationCache[mostRecentMsg], coreDoc,
coreDocumentAnnotationCache[mostRecentMsg],
cacheSentiment1, cacheSentiment2)
val callSMX = sentimentAnalyzerTest.callSMX()
return callSMX.distance ?: 0.0
}

private fun annotationCacheUpdate(strmap: ConcurrentMap<Int?, String?>): ConcurrentMap<Int?, String?> {
suspend private fun annotationCacheUpdate(strmap: ConcurrentMap<Int?, String?>): ConcurrentMap<Int?, String?> {
val jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strmap.values)
for ((key, value) in jmweAnnotation) {
jmweAnnotationCache[key] = value
}
val Annotationspipeline = MapMaker().concurrencyLevel(6).makeMap<String?, Annotation>()
val AnnotationspipelineSentiment = MapMaker().concurrencyLevel(6).makeMap<String?, Annotation>()
val Annotationspipeline = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
val AnnotationspipelineSentiment = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
val coreDocumentpipelineMap = getMultipleCoreDocumentsWaySuggestion(strmap.values, pipeline)
runBlocking {

coroutineScope {
val job = launch(Dispatchers.Default) {
for (str in strmap.values) {
val strAnno1 = Annotation(str)
@@ -448,11 +468,11 @@ class Datahandler {
}
yield()
}
pipeline.annotate(Annotationspipeline.values, 5)
pipelineSentiment!!.annotate(AnnotationspipelineSentiment.values, 5)
job.join()
}
pipeline.annotate(Annotationspipeline.values)
pipelineSentiment!!.annotate(AnnotationspipelineSentiment.values)
runBlocking {
coroutineScope {
val job = launch(Dispatchers.Default) {
for (pipelineEntry in Annotationspipeline.entries) {
if (pipelineEntry != null) {
@@ -463,18 +483,7 @@ class Datahandler {
}
job.join()
}
runBlocking {
val job = launch(Dispatchers.Default) {
for (pipelineEntry in AnnotationspipelineSentiment.entries) {
if (pipelineEntry != null) {
pipelineSentimentAnnotationCache[pipelineEntry.key] = pipelineEntry.value
}
}
yield()
}
job.join()
}
runBlocking {
coroutineScope {
val job = launch(Dispatchers.Default) {
for (coreDocumentEntry in coreDocumentpipelineMap.entries) {
coreDocumentAnnotationCache[coreDocumentEntry.key] = coreDocumentEntry.value
@@ -483,50 +492,20 @@ class Datahandler {
}
job.join()
}
coroutineScope {
val job1 = launch(Dispatchers.Default) {
for (pipelineEntry in AnnotationspipelineSentiment.entries) {
if (pipelineEntry != null) {
pipelineSentimentAnnotationCache[pipelineEntry.key] = pipelineEntry.value
}
}
yield()
}
job1.join()
}
return strmap
}

val messageOverHead: Int
get() = stringCache.values.size - stringCache.values.size / 10

fun update_autismo_socket_msg() {
try {
try {
DatagramSocket(48480).use { serverSocket ->
DatagramSocket(48471).use { serverSocket1 ->
val receiveData = ByteArray(4096)
val IPAddress = InetAddress.getByName("135.125.188.157") //later moving autism bot 1 and 3 to OVH from SYS
var receivePacket = DatagramPacket(receiveData, receiveData.size)
while (true) {
serverSocket.receive(receivePacket)
var sentence = String(receivePacket.data, 0, receivePacket.length)
sentence = sentence.replace("clientmessage:", "")
var getResponseMsg = getResponseMsg(sentence)
var sendData = getResponseMsg.toByteArray(charset("UTF-8"))
var sendPacket = DatagramPacket(sendData, sendData.size, IPAddress, 48479)
serverSocket.send(sendPacket)
receivePacket = DatagramPacket(receiveData, receiveData.size)
serverSocket1.receive(receivePacket)
sentence = String(receivePacket.data, 0, receivePacket.length)
sentence = sentence.replace("clientmessage:", "")
getResponseMsg = getResponseMsg(sentence)
sendData = getResponseMsg.toByteArray(charset("UTF-8"))
sendPacket = DatagramPacket(sendData, sendData.size, IPAddress, 48476)
serverSocket1.send(sendPacket)
}
}
}
} catch (ex: CustomError) {
Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
}
} catch (ex: SocketException) {
Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
} catch (ex: UnsupportedEncodingException) {
Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
} catch (ex: IOException) {
Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
}
}
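update_autismo_socket_msg drives both sockets through the same receive, strip, respond, send round trip. A sketch of that round trip pulled into one helper, where respond is a hypothetical stand-in for getResponseMsg (buffer size and the clientmessage: prefix taken from the code above):

    import java.net.DatagramPacket
    import java.net.DatagramSocket
    import java.net.InetAddress

    // Hypothetical stand-in for Datahandler.getResponseMsg.
    fun respond(sentence: String): String = "echo: $sentence"

    // One receive/strip/respond/send round trip, shared by both sockets.
    fun handleRoundTrip(socket: DatagramSocket, target: InetAddress, targetPort: Int) {
        val buffer = ByteArray(4096)
        val packet = DatagramPacket(buffer, buffer.size)
        socket.receive(packet)
        val sentence = String(packet.data, 0, packet.length).replace("clientmessage:", "")
        val reply = respond(sentence).toByteArray(Charsets.UTF_8)
        socket.send(DatagramPacket(reply, reply.size, target, targetPort))
    }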

private class AnnotationCollector<T> : Consumer<T> {
val annotationsT: MutableList<T?> = arrayListOf()
@@ -604,160 +583,56 @@ class Datahandler {
props.setProperty("ner.combinationMode", "HIGH_RECALL")
props.setProperty("regexner.ignorecase", "true")
props.setProperty("ner.fine.regexner.ignorecase", "true")
props.setProperty("tokenize.options", "untokenizable=firstDelete")
props.setProperty("tokenize.options", "untokenizable=firstKeep")
return StanfordCoreNLP(props)
}

@JvmStatic
fun getClassifier(): AbstractSequenceClassifier<CoreLabel>? {
fun getClassifier(): AbstractSequenceClassifier<CoreLabel> {
return classifier
}

fun setClassifier(classifier: AbstractSequenceClassifier<CoreLabel>?) {
if (classifier != null) {
Companion.classifier = classifier
}
}

private fun getMultipleJMWEAnnotation(str: Collection<String?>): ConcurrentMap<String, Annotation> {
return PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(str)
}

private fun getMultiplePipelineAnnotation(str: Collection<String?>): ConcurrentMap<String?, Annotation> {
val pipelineAnnotationMap = MapMaker().concurrencyLevel(2).makeMap<String?, Annotation>()
val pipelineAnnotationMap = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
for (str1 in str) {
val strAnno1 = Annotation(str1)
pipelineAnnotationMap[str1] = strAnno1
}
pipeline.annotate(pipelineAnnotationMap.values)
pipeline.annotate(pipelineAnnotationMap.values, 5)
return pipelineAnnotationMap
}

private fun getMultiplePipelineSentimentAnnotation(str: Collection<String?>): ConcurrentMap<String?, Annotation> {
val pipelineAnnotationMap = MapMaker().concurrencyLevel(2).makeMap<String?, Annotation>()
val pipelineAnnotationMap = MapMaker().concurrencyLevel(5).makeMap<String?, Annotation>()
for (str1 in str) {
val strAnno1 = Annotation(str1)
pipelineAnnotationMap[str1] = strAnno1
}
pipelineSentiment!!.annotate(pipelineAnnotationMap.values)
pipelineSentiment?.annotate(pipelineAnnotationMap.values, 5)
return pipelineAnnotationMap
}

fun filterContent(str: ConcurrentMap<Int?, String?>): ConcurrentMap<Int?, String?> {
val strlistreturn = MapMaker().concurrencyLevel(2).makeMap<Int?, String?>()
str.values.forEach(Consumer {
var str1: String = ""
if (!str1.isEmpty() && str1.length > 3) {
str1 = str1.trim { it <= ' ' }
if (str1.contains("PM*")) {
str1 = str1.substring(str1.indexOf("PM*") + 3)
}
if (str1.contains("AM*")) {
str1 = str1.substring(str1.indexOf("AM*") + 3)
}
/*
if (str1.contains("?") || str1.contains("°"))
{
if (!str1.contains("http"))
{
str1 = str1.replace("?", " <:wlenny:514861023002624001> ");
str1 = str1.replace("°", " <:wlenny:514861023002624001> ");
val strlistreturn = MapMaker().concurrencyLevel(5).makeMap<Int?, String?>()
for (str1: String? in str.values) {
if (!str1?.isEmpty()!! && str1.length > 3) {
var str1Local: String = str1.trim();
if (str1Local.length > 2 && !str1Local.startsWith("!")) {
strlistreturn[strlistreturn.size] = str1Local
}
}
*/if (str1.contains("(Counter-Terrorist)")) {
str1 = str1.replace("(Counter-Terrorist)", " ")
}
if (str1.contains("(Terrorist)")) {
str1 = str1.replace("(Terrorist)", " ")
}
if (str1.contains("(Spectator)")) {
str1 = str1.replace("(Spectator)", " ")
}
if (str1.contains("*DEAD*")) {
str1 = str1.replace("*DEAD*", " ")
}
if (str1.contains("{red}")) {
str1 = str1.replace("{red}", " ")
}
if (str1.contains("{orange}")) {
str1 = str1.replace("{orange}", " ")
}
if (str1.contains("{yellow}")) {
str1 = str1.replace("{yellow}", " ")
}
if (str1.contains("{green}")) {
str1 = str1.replace("{green}", " ")
}
if (str1.contains("{lightblue}")) {
str1 = str1.replace("{lightblue}", " ")
}
if (str1.contains("{blue}")) {
str1 = str1.replace("{blue}", " ")
}
if (str1.contains("{purple}")) {
str1 = str1.replace("{purple}", " ")
}
if (str1.contains("{white}")) {
str1 = str1.replace("{white}", " ")
}
if (str1.contains("{fullblue}")) {
str1 = str1.replace("{fullblue}", " ")
}
if (str1.contains("{cyan}")) {
str1 = str1.replace("{cyan}", " ")
}
if (str1.contains("{lime}")) {
str1 = str1.replace("{lime}", " ")
}
if (str1.contains("{deeppink}")) {
str1 = str1.replace("{deeppink}", " ")
}
if (str1.contains("{slategray}")) {
str1 = str1.replace("{slategray}", " ")
}
if (str1.contains("{dodgerblue}")) {
str1 = str1.replace("{dodgerblue}", " ")
}
if (str1.contains("{black}")) {
str1 = str1.replace("{black}", " ")
}
if (str1.contains("{orangered}")) {
str1 = str1.replace("{orangered}", " ")
}
if (str1.contains("{darkorchid}")) {
str1 = str1.replace("{darkorchid}", " ")
}
if (str1.contains("{pink}")) {
str1 = str1.replace("{pink}", " ")
}
if (str1.contains("{lightyellow}")) {
str1 = str1.replace("{lightyellow}", " ")
}
if (str1.contains("{chocolate}")) {
str1 = str1.replace("{chocolate}", " ")
}
if (str1.contains("{beige}")) {
str1 = str1.replace("{beige}", " ")
}
if (str1.contains("{azure}")) {
str1 = str1.replace("{azure}", " ")
}
if (str1.contains("{yellowgreen}")) {
str1 = str1.replace("{yellowgreen}", " ")
}
str1 = str1.trim { it <= ' ' }
if (str1.length > 2 && !str1.startsWith("!")) {
strlistreturn[strlistreturn.size] = str1
}
}
})
}
return strlistreturn
}
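The old filterContent body carries a long chain of contains/replace pairs for the {red}, {orange}, ... colour tags and the role markers. A sketch of the same cleanup done in one regex pass; the marker list comes from the code above, while the assumption that every lowercase {...} token should be stripped is mine:

    // Matches any {colour} tag in one pass instead of ~20 contains/replace pairs.
    private val colorTag = Regex("""\{[a-z]+\}""")
    private val roleMarkers = listOf("(Counter-Terrorist)", "(Terrorist)", "(Spectator)", "*DEAD*")

    fun stripChatMarkup(line: String): String {
        var cleaned = colorTag.replace(line, " ")
        for (marker in roleMarkers) cleaned = cleaned.replace(marker, " ")
        return cleaned.trim()
    }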
fun getMultipleCoreDocumentsWaySuggestion(str: Collection<String?>, localNLP: StanfordCoreNLP): ConcurrentMap<String, CoreDocument> {
suspend fun getMultipleCoreDocumentsWaySuggestion(str: Collection<String?>, localNLP: StanfordCoreNLP): ConcurrentMap<String, CoreDocument> {
val annCollector: AnnotationCollector<Annotation?> = AnnotationCollector<Annotation?>()
val annotationreturnMap = MapMaker().concurrencyLevel(6).makeMap<String, CoreDocument>()
runBlocking {
coroutineScope {
val job = launch(Dispatchers.Default) {
for (exampleString in str) {
localNLP.annotate(Annotation(exampleString), annCollector)
@@ -772,7 +647,7 @@ class Datahandler {
} catch (ex: InterruptedException) {
Logger.getLogger(Datahandler::class.java.name).log(Level.SEVERE, null, ex)
}
runBlocking {
coroutineScope {
val job1 = launch(Dispatchers.Default) {
for (ann in annCollector.annotationsT) {
if (ann != null) {
@@ -796,9 +671,9 @@ class Datahandler {

init {
stopwatch = Stopwatch.createUnstarted()
jmweAnnotationCache = MapMaker().concurrencyLevel(4).makeMap<String, Annotation>()
pipelineAnnotationCache = MapMaker().concurrencyLevel(4).makeMap<String, Annotation>()
pipelineSentimentAnnotationCache = MapMaker().concurrencyLevel(4).makeMap<String, Annotation>()
jmweAnnotationCache = MapMaker().concurrencyLevel(5).makeMap<String, Annotation>()
pipelineAnnotationCache = MapMaker().concurrencyLevel(5).makeMap<String, Annotation>()
pipelineSentimentAnnotationCache = MapMaker().concurrencyLevel(5).makeMap<String, Annotation>()
coreDocumentAnnotationCache = MapMaker().concurrencyLevel(5).makeMap<String, CoreDocument>()
}
}
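Several loops in this file launch one job per cached string and then call job.join() inside the same loop iteration, which makes the jobs run one after another. A minimal sketch of the fan-out shape where the per-string work actually overlaps; string length stands in for the real Annotation work:

    import kotlinx.coroutines.Dispatchers
    import kotlinx.coroutines.coroutineScope
    import kotlinx.coroutines.launch
    import java.util.concurrent.ConcurrentHashMap

    // Fan out one coroutine per string and join them all at the end; joining
    // each job inside the loop would serialize the work again.
    suspend fun annotateAll(strings: Collection<String>): Map<String, Int> = coroutineScope {
        val results = ConcurrentHashMap<String, Int>()
        val jobs = strings.map { s ->
            launch(Dispatchers.Default) { results[s] = s.length } // stand-in for Annotation(s).compact()
        }
        jobs.forEach { it.join() } // explicit, though coroutineScope would also wait
        results
    }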
@@ -9,17 +9,18 @@ import PresentationLayer.DiscordHandler;
import discord4j.core.event.domain.message.MessageCreateEvent;
import discord4j.core.object.entity.User;
import discord4j.core.object.entity.channel.TextChannel;

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

/**
 *
 * @author install1
 */
public class DoStuff {
@@ -59,8 +60,7 @@ public class DoStuff {
List<User> blockLast = event.getMessage().getUserMentions().buffer().blockLast();
String content = event.getMessage().getContent();
if (!channelpermissionsDenied) {
if (blockLast != null)
{
if (blockLast != null) {
for (User user : blockLast) {
content = content.replace(user.getId().asString(), "");
}
@@ -68,35 +68,24 @@ public class DoStuff {
MessageResponseHandler.getMessage(content);
}
boolean mentionedBot = false;
if (blockLast != null){
for (User user : blockLast)
{
if (user.getUsername().equals(usernameBot))
{
if (blockLast != null) {
for (User user : blockLast) {
if (user.getUsername().equals(usernameBot)) {
mentionedBot = true;
break;
}
}
}
if (mentionedBot || channelName.contains("general-autism")) {
try {
String ResponseStr;
ResponseStr = MessageResponseHandler.selectReponseMessage(content, username);
if (!ResponseStr.isEmpty()) {
System.out.print("\nResponseStr3: " + ResponseStr + "\n");
event.getMessage().getChannel().block().createMessage(ResponseStr).block();
}
} catch (CustomError ex) {
Logger.getLogger(DoStuff.class.getName()).log(Level.SEVERE, null, ex);
String ResponseStr;
ResponseStr = MessageResponseHandler.selectReponseMessage(content, username);
if (!ResponseStr.isEmpty()) {
System.out.print("\nResponseStr3: " + ResponseStr + "\n");
event.getMessage().getChannel().block().createMessage(ResponseStr).block();
}

}
new Thread(() -> {
try {
Datahandler.instance.checkIfUpdateStrings();
} catch (CustomError ex) {
Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);
}
Datahandler.instance.checkIfUpdateStrings();
}).start();
occupied = false;
}
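checkIfUpdateStrings is now a runBlocking function, but DoStuff still fires it from a bare new Thread. A hedged sketch of launching that background refresh from a dedicated long-lived scope instead; botScope and scheduleUpdate are assumptions for illustration, not code from this commit:

    import kotlinx.coroutines.CoroutineScope
    import kotlinx.coroutines.Dispatchers
    import kotlinx.coroutines.SupervisorJob
    import kotlinx.coroutines.launch

    // One long-lived scope for background maintenance work; with SupervisorJob,
    // a failure in one update does not cancel the whole scope.
    val botScope = CoroutineScope(SupervisorJob() + Dispatchers.Default)

    fun scheduleUpdate(update: suspend () -> Unit) {
        botScope.launch { update() }
    }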
@@ -8,21 +8,30 @@ package FunctionLayer;
import com.google.common.collect.MapMaker;
import edu.stanford.nlp.pipeline.CoreDocument;
import edu.stanford.nlp.pipeline.CoreEntityMention;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 *
 * @author install1
 */
public class MessageResponseHandler {

private static ConcurrentMap<Integer, String> str = new MapMaker().concurrencyLevel(2).makeMap();
private static ConcurrentMap<Integer, String> str = new MapMaker().concurrencyLevel(6).makeMap();

public static ConcurrentMap<Integer, String> getStr() {
ArrayList<String> arrayList = new ArrayList(str.values());
Collections.sort(arrayList, (o1, o2) -> o2.length() - o1.length());
int iterator = 0;
for (String str1 : arrayList) {
str.put(iterator, str1);
iterator++;
}
return str;
}

@@ -43,7 +52,7 @@ public class MessageResponseHandler {
}
}

public static String selectReponseMessage(String toString, String personName) throws CustomError {
public static String selectReponseMessage(String toString, String personName) {
String getResponseMsg = Datahandler.instance.getResponseMsg(toString);
getResponseMsg = checkPersonPresentInSentence(personName, getResponseMsg, toString);
return getResponseMsg;
@@ -87,9 +96,4 @@ public class MessageResponseHandler {
}
return responseMsg;
}

public static int getOverHead() {
int getResponseMsgOverHead = Datahandler.instance.getMessageOverHead();
return getResponseMsgOverHead;
}
}
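getStr above re-sorts the shared map by writing the length-ordered values back under ascending keys, which can race with concurrent writers. A sketch of producing the same ordering as a snapshot instead (Kotlin, for consistency with the other sketches; an assumption about intent, not the commit's code):

    import java.util.concurrent.ConcurrentMap

    // Snapshot the values and index them by descending length without
    // mutating the shared map that other threads may be writing to.
    fun lengthOrderedSnapshot(source: ConcurrentMap<Int, String>): Map<Int, String> =
        source.values
            .sortedByDescending { it.length }
            .withIndex()
            .associate { (i, s) -> i to s }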
@@ -24,6 +24,7 @@ import edu.stanford.nlp.ling.JMWEAnnotation;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.util.CoreMap;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
@@ -33,7 +34,6 @@ import java.util.Properties;
import java.util.concurrent.ConcurrentMap;

/**
 *
 * @author install1
 */
//maybe not public?
@@ -65,12 +65,13 @@ public class PipelineJMWESingleton {
throw new RuntimeException("unable to open IMWEIndex index: " + e + "\n");
}
IMWEDetector detector = getDetector(index, detectorName);
ConcurrentMap<String, Annotation> returnAnnotations = new MapMaker().concurrencyLevel(2).makeMap();
ConcurrentMap<String, Annotation> returnAnnotations = new MapMaker().concurrencyLevel(5).makeMap();
strvalues.forEach(str -> {
Annotation annoStr = new Annotation(str);
returnAnnotations.put(str, annoStr);
});
localNLP.annotate(returnAnnotations.values());
localNLP.annotate(returnAnnotations.values(), 4);

returnAnnotations.values().parallelStream().forEach(annoStr -> {
for (CoreMap sentence : annoStr.get(CoreAnnotations.SentencesAnnotation.class)) {
List<IMWE<IToken>> mwes = getjMWEInSentence(sentence, index, detector, verbose);
@@ -85,8 +86,8 @@ public class PipelineJMWESingleton {
Properties propsJMWE;
propsJMWE = new Properties();
propsJMWE.setProperty("annotators", "tokenize,ssplit,pos,lemma");
propsJMWE.setProperty("tokenize.options", "untokenizable=firstDelete");
propsJMWE.setProperty("threads", "25");
propsJMWE.setProperty("tokenize.options", "untokenizable=firstKeep");
propsJMWE.setProperty("threads", "5");
propsJMWE.setProperty("pos.maxlen", "90");
propsJMWE.setProperty("tokenize.maxlen", "90");
propsJMWE.setProperty("ssplit.maxlen", "90");
@@ -124,7 +125,7 @@ public class PipelineJMWESingleton {
}

public List<IMWE<IToken>> getjMWEInSentence(CoreMap sentence, IMWEIndex index, IMWEDetector detector,
boolean verbose) {
boolean verbose) {
List<IToken> tokens = getITokens(sentence.get(CoreAnnotations.TokensAnnotation.class));
List<IMWE<IToken>> mwes = detector.detect(tokens);
if (verbose) {
@@ -37,6 +37,7 @@ import edu.stanford.nlp.trees.TypedDependency;
import edu.stanford.nlp.trees.tregex.gui.Tdiff;
import edu.stanford.nlp.util.CoreMap;
import edu.stanford.nlp.util.Pair;

import java.io.StringReader;
import java.util.AbstractMap;
import java.util.ArrayList;
@@ -49,6 +50,7 @@ import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.lucene.analysis.core.StopAnalyzer;
import org.ejml.simple.SimpleMatrix;

@@ -57,11 +59,11 @@ import org.ejml.simple.SimpleMatrix;
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */

/**
 *
 * @author install1
 */
public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
public class SentimentAnalyzerTest {

private final SimilarityMatrix smxParam;
private final String str;
@@ -80,17 +82,9 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
private SentimentValueCache cacheSentiment1;
private SentimentValueCache cacheSentiment2;

public final SentimentValueCache getCacheSentiment1() {
return cacheSentiment1;
}

public final SentimentValueCache getCacheSentiment2() {
return cacheSentiment2;
}

public SentimentAnalyzerTest(String str, String str1, SimilarityMatrix smxParam, Annotation str1Annotation, Annotation str2Annotation,
Annotation strPipeline1, Annotation strPipeline2, Annotation strPipeSentiment1, Annotation strPipeSentiment2,
CoreDocument pipelineCoreDcoument1, CoreDocument pipelineCoreDcoument2, SentimentValueCache cacheValue1, SentimentValueCache cacheValue2) {
Annotation strPipeline1, Annotation strPipeline2, Annotation strPipeSentiment1, Annotation strPipeSentiment2,
CoreDocument pipelineCoreDcoument1, CoreDocument pipelineCoreDcoument2, SentimentValueCache cacheValue1, SentimentValueCache cacheValue2) {
this.str = str;
this.str1 = str1;
this.smxParam = smxParam;
@@ -112,7 +106,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
private List<List<TaggedWord>> getTaggedWordList(String message) {
List<List<TaggedWord>> taggedwordlist = new ArrayList();
DocumentPreprocessor tokenizer = new DocumentPreprocessor(new StringReader(message));
TokenizerFactory<CoreLabel> ptbTokenizerFactory = PTBTokenizer.factory(new CoreLabelTokenFactory(), "untokenizable=firstDelete"); //noneDelete
TokenizerFactory<CoreLabel> ptbTokenizerFactory = PTBTokenizer.factory(new CoreLabelTokenFactory(), "untokenizable=noneDelete"); //noneDelete //firstDelete
tokenizer.setTokenizerFactory(ptbTokenizerFactory);
for (final List<HasWord> sentence : tokenizer) {
taggedwordlist.add(tagger.tagSentence(sentence));
@@ -122,7 +116,6 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {

private int tokenizeCounting(List<List<TaggedWord>> taggedwordlist) {
int counter = 0;
Collection<TaggedWord> taggedCollection = new ArrayList();
for (List<TaggedWord> taggedList : taggedwordlist) {
counter += taggedList.size();
}
@@ -130,28 +123,31 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
}

private ConcurrentMap<Integer, String> retrieveTGWListIndex(List<List<TaggedWord>> taggedwordlist) {
ConcurrentMap<Integer, String> tgwlistIndex = new MapMaker().concurrencyLevel(2).makeMap();
taggedwordlist.forEach((TGWList) -> {
TGWList.forEach((TaggedWord) -> {
if (!tgwlistIndex.values().contains(TaggedWord.tag()) && !TaggedWord.tag().equals(":")) {
tgwlistIndex.put(tgwlistIndex.size() + 1, TaggedWord.tag());
ConcurrentMap<Integer, String> tgwlistIndex = new MapMaker().concurrencyLevel(5).makeMap();
for (List<TaggedWord> tGWList : taggedwordlist) {
for (TaggedWord taggedWord : tGWList) {
for (String str : tgwlistIndex.values()) {
if (!taggedWord.tag().equals(str) && !taggedWord.tag().equals(":")) {
tgwlistIndex.put(tgwlistIndex.size() + 1, taggedWord.tag());
tGWList.remove(taggedWord);
}
}
});
});
}
}
return tgwlistIndex;
}

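The rewritten retrieveTGWListIndex removes elements from tGWList while iterating it with a for-each loop, which will throw ConcurrentModificationException on an ArrayList, and its inner loop only compares against values already stored. A sketch of the duplicate-free tag index the original lambda version computed, using a set; simplifying TaggedWord to its tag string is my assumption for illustration:

    // Collect each distinct POS tag (skipping ":") exactly once, preserving
    // first-seen order; nothing is removed from the list being iterated.
    fun tagIndex(taggedSentences: List<List<String>>): Map<Int, String> {
        val seen = LinkedHashSet<String>()
        for (sentence in taggedSentences) {
            for (tag in sentence) {
                if (tag != ":") seen.add(tag)
            }
        }
        return seen.withIndex().associate { (i, tag) -> i + 1 to tag }
    }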
private Double iterateTrees(ConcurrentMap<Integer, Tree> sentenceConstituencyParseList2, ConcurrentMap<Integer, Tree> sentenceConstituencyParseList1,
|
||||
Double score) {
|
||||
Double score) {
|
||||
double preConstituentsScore = score;
|
||||
ConcurrentMap<Integer, Integer> constituentsMap = new MapMaker().concurrencyLevel(4).makeMap();
|
||||
ConcurrentMap<Integer, Integer> constituentsMap = new MapMaker().concurrencyLevel(5).makeMap();
|
||||
int constituencySize = sentenceConstituencyParseList1.size() + sentenceConstituencyParseList2.size();
|
||||
for (final Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2.values()) {
|
||||
int constiRelationsize = 0;
|
||||
for (final Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1.values()) {
|
||||
Set<Constituent> constinuent1 = Tdiff.markDiff(sentenceConstituencyParse1, sentenceConstituencyParse2);
|
||||
Set<Constituent> constinuent2 = Tdiff.markDiff(sentenceConstituencyParse2, sentenceConstituencyParse1);
|
||||
ConcurrentMap<Integer, String> constiLabels = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
ConcurrentMap<Integer, String> constiLabels = new MapMaker().concurrencyLevel(5).makeMap();
|
||||
for (final Constituent consti : constinuent1) {
|
||||
for (final Constituent consti1 : constinuent2) {
|
||||
if (consti.value().equals(consti1.value()) && !constiLabels.values().contains(consti.value())) {
|
||||
@ -223,10 +219,10 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
}
|
||||
|
||||
private Double typeDependenciesGrammaticalRelation(Collection<TypedDependency> allTypedDependencies1, Collection<TypedDependency> allTypedDependencies2,
|
||||
Double score, ConcurrentMap<Integer, GrammaticalStructure> grammaticalMap1, ConcurrentMap<Integer, GrammaticalStructure> grammaticalMap2,
|
||||
ConcurrentMap<Integer, Tree> sentenceConstituencyParseList1, ConcurrentMap<Integer, Tree> sentenceConstituencyParseList2) {
|
||||
ConcurrentMap<Integer, Integer> alltypeDepsSizeMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
ConcurrentMap<Integer, Integer> summationMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
Double score, ConcurrentMap<Integer, GrammaticalStructure> grammaticalMap1, ConcurrentMap<Integer, GrammaticalStructure> grammaticalMap2,
|
||||
ConcurrentMap<Integer, Tree> sentenceConstituencyParseList1, ConcurrentMap<Integer, Tree> sentenceConstituencyParseList2) {
|
||||
ConcurrentMap<Integer, Integer> alltypeDepsSizeMap = new MapMaker().concurrencyLevel(5).makeMap();
|
||||
ConcurrentMap<Integer, Integer> summationMap = new MapMaker().concurrencyLevel(5).makeMap();
|
||||
int relationApplicable1 = 0;
|
||||
int relationApplicable2 = 0;
|
||||
int grammaticalRelation1 = 0;
|
||||
@ -332,19 +328,27 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
: (grammaticalRelation2 - grammaticalRelation1) * 500;
|
||||
}
|
||||
}
|
||||
ConcurrentMap<Integer, String> filerTreeContent = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
ConcurrentMap<Integer, String> filerTreeContent = new MapMaker().concurrencyLevel(5).makeMap();
|
||||
AtomicInteger runCount1 = new AtomicInteger(0);
|
||||
for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1.values()) {
|
||||
for (Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2.values()) {
|
||||
sentenceConstituencyParse1.taggedLabeledYield().forEach((LBW) -> {
|
||||
sentenceConstituencyParse2.taggedLabeledYield().stream().filter((LBW1) -> (LBW.lemma().equals(LBW1.lemma())
|
||||
&& !filerTreeContent.values().contains(LBW.lemma()))).map((_item) -> {
|
||||
filerTreeContent.put(filerTreeContent.size() + 1, LBW.lemma());
|
||||
return _item;
|
||||
}).forEachOrdered((_item) -> {
|
||||
runCount1.getAndIncrement();
|
||||
});
|
||||
});
|
||||
for (CoreLabel LBW : sentenceConstituencyParse1.taggedLabeledYield()) {
|
||||
for (CoreLabel LBW1 : sentenceConstituencyParse2.taggedLabeledYield()) {
|
||||
if (LBW.lemma().equals(LBW1.lemma())) {
|
||||
boolean found = false;
|
||||
for (String str : filerTreeContent.values()) {
|
||||
if (str.equals(LBW.lemma())) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
filerTreeContent.put(filerTreeContent.size() + 1, LBW.lemma());
|
||||
runCount1.getAndIncrement();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
score += runCount1.get() * 250;
|
||||
@ -384,10 +388,10 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
int iterationOverHeat = 0;
|
||||
double scoreFallback = score;
|
||||
for (SimpleMatrix simpleSMX2 : simpleSMXlist2.values()) {
|
||||
ConcurrentMap<Integer, Double> AccumulateDotMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
ConcurrentMap<Integer, Double> subtractorMap = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
ConcurrentMap<Integer, Double> dotPredictions = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
ConcurrentMap<Integer, Double> DotOverTransfer = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
ConcurrentMap<Integer, Double> AccumulateDotMap = new MapMaker().concurrencyLevel(5).makeMap();
|
||||
ConcurrentMap<Integer, Double> subtractorMap = new MapMaker().concurrencyLevel(5).makeMap();
|
||||
ConcurrentMap<Integer, Double> dotPredictions = new MapMaker().concurrencyLevel(5).makeMap();
|
||||
ConcurrentMap<Integer, Double> DotOverTransfer = new MapMaker().concurrencyLevel(5).makeMap();
|
||||
Double totalSubtraction = 0.0;
|
||||
Double largest = 10.0;
|
||||
Double shortest = 100.0;
|
||||
@ -538,10 +542,10 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
}
|
||||
|
||||
private Double simpleRNNMaxtrixVectors(Double score, ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector1, ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector2) {
|
||||
ConcurrentMap<Integer, Double> elementSumCounter = new MapMaker().concurrencyLevel(3).makeMap();
|
||||
ConcurrentMap<Integer, Double> dotMap = new MapMaker().concurrencyLevel(3).makeMap();
|
||||
ConcurrentMap<Integer, Double> elementSumMap = new MapMaker().concurrencyLevel(3).makeMap();
|
||||
ConcurrentMap<Integer, Double> dotSumMap = new MapMaker().concurrencyLevel(3).makeMap();
|
||||
ConcurrentMap<Integer, Double> elementSumCounter = new MapMaker().concurrencyLevel(5).makeMap();
|
||||
ConcurrentMap<Integer, Double> dotMap = new MapMaker().concurrencyLevel(5).makeMap();
|
||||
ConcurrentMap<Integer, Double> elementSumMap = new MapMaker().concurrencyLevel(5).makeMap();
|
||||
ConcurrentMap<Integer, Double> dotSumMap = new MapMaker().concurrencyLevel(5).makeMap();
|
||||
Double preDot = 0.0;
|
||||
Double postDot = 0.0;
|
||||
int iterateSize = simpleSMXlistVector1.values().size() + simpleSMXlistVector2.values().size();
|
||||
@ -718,7 +722,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
}
|
||||
|
||||
private final Map.Entry<Double, Map.Entry<SentimentValueCache, SentimentValueCache>> classifyRawEvaluation(Double score, SentimentValueCache cacheSentimentLocal1,
|
||||
SentimentValueCache cacheSentimentLocal2) {
|
||||
SentimentValueCache cacheSentimentLocal2) {
|
||||
if (cacheSentiment1 == null || cacheSentiment2 == null) {
|
||||
DocumentReaderAndWriter<CoreLabel> readerAndWriter = classifier.makePlainTextReaderAndWriter();
|
||||
if (cacheSentiment1 == null) {
|
||||
@ -731,7 +735,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
final List classifyRaw1 = cacheSentiment1 == null ? cacheSentimentLocal1.getClassifyRaw() : cacheSentiment1.getClassifyRaw();
|
||||
final List classifyRaw2 = cacheSentiment2 == null ? cacheSentimentLocal2.getClassifyRaw() : cacheSentiment2.getClassifyRaw();
|
||||
score -= (classifyRaw1.size() > classifyRaw2.size() ? classifyRaw1.size() - classifyRaw2.size() : classifyRaw2.size() - classifyRaw1.size()) * 200;
|
||||
Map.Entry< Double, Map.Entry<SentimentValueCache, SentimentValueCache>> entry
|
||||
Map.Entry<Double, Map.Entry<SentimentValueCache, SentimentValueCache>> entry
|
||||
= new AbstractMap.SimpleEntry(score, new AbstractMap.SimpleEntry(cacheSentimentLocal1, cacheSentimentLocal2));
|
||||
return entry;
|
||||
}
|
||||
@@ -845,7 +849,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
     }

     private Double entryCountsScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) {
-        ConcurrentMap<Integer, Integer> countsMap = new MapMaker().concurrencyLevel(2).makeMap();
+        ConcurrentMap<Integer, Integer> countsMap = new MapMaker().concurrencyLevel(5).makeMap();
         int totalsize = cacheSentimentLocal1.getEntryCounts().values().size() + cacheSentimentLocal2.getEntryCounts().values().size();
         for (int counts : cacheSentimentLocal1.getEntryCounts().values()) {
             for (int counts1 : cacheSentimentLocal2.getEntryCounts().values()) {
@@ -930,7 +934,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
     }

     private Double strTokensMapScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) {
-        ConcurrentMap<Integer, String> strtokensMap = new MapMaker().concurrencyLevel(2).makeMap();
+        ConcurrentMap<Integer, String> strtokensMap = new MapMaker().concurrencyLevel(5).makeMap();
         for (String strTokeniPart1 : cacheSentimentLocal1.getstrTokenGetiPart().values()) {
             for (String strTokeniPart2 : cacheSentimentLocal2.getstrTokenGetiPart().values()) {
                 if (strTokeniPart1.equals(strTokeniPart2) && !strtokensMap.values().contains(strTokeniPart2)) {
@@ -959,7 +963,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
         if (tokenEntry1 * 2 != tokenEntry2 && tokenEntry2 * 2 != tokenEntry1) {
             boundaryLeaks = true;
         }
-        ConcurrentMap<Integer, String> entryTokenMap = new MapMaker().concurrencyLevel(2).makeMap();
+        ConcurrentMap<Integer, String> entryTokenMap = new MapMaker().concurrencyLevel(5).makeMap();
         for (String strTokenEntry1 : cacheSentimentLocal1.getstrTokenGetEntry().values()) {
             for (String strTokenEntry2 : cacheSentimentLocal2.getstrTokenGetEntry().values()) {
                 if (!entryTokenMap.values().contains(strTokenEntry2)) {
@@ -979,7 +983,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
     }

     private Double strTokenMapTagsScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) {
-        ConcurrentMap<Integer, String> iTokenMapTagsMap = new MapMaker().concurrencyLevel(2).makeMap();
+        ConcurrentMap<Integer, String> iTokenMapTagsMap = new MapMaker().concurrencyLevel(5).makeMap();
         for (String strmapTag : cacheSentimentLocal1.getITokenMapTag().values()) {
             for (String strmapTag1 : cacheSentimentLocal2.getITokenMapTag().values()) {
                 if (strmapTag.equals(strmapTag1) && !iTokenMapTagsMap.values().contains(strmapTag1)) {
@@ -1028,7 +1032,7 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
     }

     private Double tokenStemmingMapScoring(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) {
-        ConcurrentMap<Integer, String> tokenStemmingMap = new MapMaker().concurrencyLevel(2).makeMap();
+        ConcurrentMap<Integer, String> tokenStemmingMap = new MapMaker().concurrencyLevel(5).makeMap();
         for (String strTokenStem : cacheSentimentLocal1.getstrTokenStems().values()) {
             for (String strTokenStem1 : cacheSentimentLocal2.getstrTokenStems().values()) {
                 if (strTokenStem.equals(strTokenStem1) && !tokenStemmingMap.values().contains(strTokenStem)) {
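All of these helpers share one idiom: matches are copied into a ConcurrentMap keyed by a running counter, guarded by values().contains(...), which is a linear scan on every check. The same dedup-and-count can be sketched with plain sets; the getter names in the diff are untouched, this is only an illustration of the idiom:

    import java.util.Collection;
    import java.util.HashSet;
    import java.util.Set;

    // counts distinct strings present in both collections,
    // which is what the nested loops above effectively compute
    static int countSharedDistinct(Collection<String> c1, Collection<String> c2) {
        Set<String> shared = new HashSet<>(c1);
        shared.retainAll(new HashSet<>(c2)); // keep only strings found in both
        return shared.size();
    }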
@@ -1233,18 +1237,25 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {

     private Double tgwListScoreIncrementer(Double score, SentimentValueCache cacheSentimentLocal1, SentimentValueCache cacheSentimentLocal2) {
         AtomicInteger runCount = new AtomicInteger(0);
-        cacheSentimentLocal1.getTgwlistIndex().values().forEach(TaggedWord -> {
-            if (!cacheSentimentLocal2.getTgwlistIndex().values().contains(TaggedWord)) {
-                cacheSentimentLocal2.addTgwlistIndex(TaggedWord);
+        for (String taggedWord : cacheSentimentLocal1.getTgwlistIndex().values()) {
+            boolean found = false;
+            for (String taggedWord1 : cacheSentimentLocal2.getTgwlistIndex().values()) {
+                if (taggedWord.equals(taggedWord1)) {
+                    found = true;
+                    break;
+                }
+            }
+            if (!found) {
+                cacheSentimentLocal2.addTgwlistIndex(taggedWord);
                 runCount.getAndIncrement();
             }
-        });
+        }
         score += runCount.get() * 64;
         return score;
     }

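The rewrite replaces a forEach lambda that mutated cacheSentimentLocal2's index while scanning it (via contains inside the callback) with an explicit nested loop and a found flag. If the index values are plain strings, a HashSet gives the same de-duplication without the quadratic scan. A sketch under that assumption; the Set shortcut is illustrative, not what the commit does:

    import java.util.HashSet;
    import java.util.Set;

    // assumes getTgwlistIndex().values() yields the tagged-word strings
    Set<String> seen = new HashSet<>(cacheSentimentLocal2.getTgwlistIndex().values());
    int added = 0;
    for (String taggedWord : cacheSentimentLocal1.getTgwlistIndex().values()) {
        if (seen.add(taggedWord)) { // true only the first time a word is seen
            cacheSentimentLocal2.addTgwlistIndex(taggedWord);
            added++;
        }
    }
    score += added * 64;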
-    @Override
-    public final SimilarityMatrix call() {
+
+    public final SimilarityMatrix callSMX() {
         Double score = -100.0;
         SentimentValueCache cacheSentimentLocal1 = null;
         SentimentValueCache cacheSentimentLocal2 = null;
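Note that the hunk headers still read implements Callable<SimilarityMatrix>, so dropping the @Override call() in favor of callSMX() breaks the Callable contract unless a bridge remains. A minimal sketch, assuming the rename itself is intentional:

    @Override
    public SimilarityMatrix call() {
        // keep the Callable contract; the scoring logic lives in callSMX()
        return callSMX();
    }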
@@ -1258,7 +1269,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
                 cacheSentimentLocal2 = initializeCacheSetup(str1, cacheSentimentLocal2);
             }
         } catch (Exception ex) {
-
+            System.out.println("ex 1: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
+                    + ex.getCause());
         }
         try {
             counter1 = cacheSentiment1 == null ? cacheSentimentLocal1.getCounter() : cacheSentiment1.getCounter();
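This pattern repeats through the rest of the method: each formerly empty catch body now prints an indexed diagnostic ("ex 1" through "ex 12"). Since java.util.logging is already imported elsewhere in the project, the same diagnostics could go through a Logger instead of System.out; a sketch of that alternative, not what the commit does:

    import java.util.logging.Level;
    import java.util.logging.Logger;

    private static final Logger LOG = Logger.getLogger(SentimentAnalyzerTest.class.getName());

    // same information as the println diagnostics, with the step number kept
    static void logScoringFailure(int step, Exception ex) {
        LOG.log(Level.SEVERE, "ex " + step + ": " + ex.getMessage() + " " + ex.getLocalizedMessage(), ex.getCause());
    }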
@@ -1267,7 +1279,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
             final int overValue = (counter1 >= counter2 ? counter1 - counter2 : counter2 - counter1) * 32;
             score -= overValue;
         } catch (Exception ex) {
-
+            System.out.println("ex 2: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
+                    + ex.getCause());
         }
         try {
             if (cacheSentiment1 == null) {
@@ -1283,7 +1296,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
                 }
             }
         } catch (Exception ex) {
-
+            System.out.println("ex 3: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
+                    + ex.getCause());
         }
         try {
             score = tgwListScoreIncrementer(score, cacheSentiment1 == null
@@ -1295,7 +1309,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
                 cacheSentimentLocal2 = GrammaticStructureSetup(cacheSentimentLocal2, pipelineAnnotation2);
             }
         } catch (Exception ex) {
-
+            System.out.println("ex 4: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
+                    + ex.getCause());
         }
         ConcurrentMap<Integer, Tree> sentenceConstituencyParseList1 = null;
         ConcurrentMap<Integer, Tree> sentenceConstituencyParseList2 = null;
@@ -1306,7 +1321,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
                     ? cacheSentimentLocal1.getSentenceConstituencyParseList() : cacheSentiment1.getSentenceConstituencyParseList();
             score = iterateTrees(sentenceConstituencyParseList2, sentenceConstituencyParseList1, score);
         } catch (Exception ex) {
-
+            System.out.println("ex 5: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
+                    + ex.getCause());
         }
         try {
             Collection<TypedDependency> allTypedDependencies2 = cacheSentiment2 == null ? cacheSentimentLocal2.getAllTypedDependencies()
@@ -1319,7 +1335,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
             score = typeDependenciesGrammaticalRelation(allTypedDependencies1, allTypedDependencies2, score, grammaticalMap1, grammaticalMap2,
                     sentenceConstituencyParseList1, sentenceConstituencyParseList2);
         } catch (Exception ex) {
-
+            System.out.println("ex 6: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
+                    + ex.getCause());
         }
         try {
             if (cacheSentiment1 == null) {
@@ -1329,7 +1346,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
                 cacheSentimentLocal2 = sentimentCoreAnnotationSetup(pipelineAnnotation2Sentiment, cacheSentimentLocal2);
             }
         } catch (Exception ex) {
-
+            System.out.println("ex 7: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
+                    + ex.getCause());
         }
         try {
             final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist1 = cacheSentiment1 == null
@@ -1343,7 +1361,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
             score = simpleRNNMatrixCalculations(score, simpleSMXlist1, simpleSMXlist2);
             score = simpleRNNMaxtrixVectors(score, simpleSMXlistVector1, simpleSMXlistVector2);
         } catch (Exception ex) {
-
+            System.out.println("ex 8: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
+                    + ex.getCause());
         }
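simpleRNNMatrixCalculations and simpleRNNMaxtrixVectors compare the sentiment model's SimpleMatrix outputs (EJML). A rough sketch of that kind of pairwise comparison; the weights and method name are hypothetical, only dot() and elementSum() are real EJML calls:

    import org.ejml.simple.SimpleMatrix;

    // hypothetical comparison of two sentiment vectors
    static double compareVectors(SimpleMatrix v1, SimpleMatrix v2) {
        double dot = v1.dot(v2);                                   // inner product
        double sumGap = Math.abs(v1.elementSum() - v2.elementSum());
        return dot * 100 - sumGap * 10;                            // illustrative weighting
    }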
         try {
             int sentiment1 = cacheSentiment1 == null ? cacheSentimentLocal1.getRnnPrediectClassMap().size() : cacheSentiment1.getRnnPrediectClassMap().size();
@@ -1359,7 +1378,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
                 cacheSentimentLocal2 = classifyRawEvaluationEntry.getValue().getValue();
             }
         } catch (Exception ex) {
-
+            System.out.println("ex 9: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
+                    + ex.getCause());
         }
         try {
             if (cacheSentiment1 == null) {
@@ -1370,97 +1390,52 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
             }
             score = sentimentMatrixVariances(score, cacheSentiment1 == null ? cacheSentimentLocal1.getLongest() : cacheSentiment1.getLongest(),
                     cacheSentiment2 == null ? cacheSentimentLocal2.getLongest() : cacheSentiment2.getLongest(), cacheSentiment1 == null
-                    ? cacheSentimentLocal1.getMainSentiment() : cacheSentiment1.getMainSentiment(), cacheSentiment2 == null
-                    ? cacheSentimentLocal2.getMainSentiment() : cacheSentiment2.getMainSentiment());
+                    ? cacheSentimentLocal1.getMainSentiment() : cacheSentiment1.getMainSentiment(), cacheSentiment2 == null
+                    ? cacheSentimentLocal2.getMainSentiment() : cacheSentiment2.getMainSentiment());
         } catch (Exception ex) {
-
+            System.out.println("ex 10: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
+                    + ex.getCause());
         }
-        try {
-            if (cacheSentiment1 == null) {
-                cacheSentimentLocal1 = jmweAnnotationSetup(jmweStrAnnotation1, cacheSentimentLocal1);
-            }
-            if (cacheSentiment2 == null) {
-                cacheSentimentLocal2 = jmweAnnotationSetup(jmweStrAnnotation2, cacheSentimentLocal2);
-            }
-        } catch (Exception ex) {
-
+        if (cacheSentiment1 == null) {
+            cacheSentimentLocal1 = jmweAnnotationSetup(jmweStrAnnotation1, cacheSentimentLocal1);
+        }
+        if (cacheSentiment2 == null) {
+            cacheSentimentLocal2 = jmweAnnotationSetup(jmweStrAnnotation2, cacheSentimentLocal2);
+        }

         SentimentValueCache scoringCache1 = cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1;
         SentimentValueCache scoringCache2 = cacheSentiment2 == null ? cacheSentimentLocal2 : cacheSentiment2;
-        try {
-            score = entryCountsRelation(score, scoringCache1, scoringCache2);
-        } catch (Exception ex) {
-
-        }
+        score = entryCountsRelation(score, scoringCache1, scoringCache2);
-        try {
-            score = entryCountsScoring(score, scoringCache1, scoringCache2);
-        } catch (Exception ex) {
-
-        }
+        score = entryCountsScoring(score, scoringCache1, scoringCache2);
-        try {
-            score = tokenEntryPosScoring(score, scoringCache1, scoringCache2);
-        } catch (Exception ex) {
-
-        }
+        score = tokenEntryPosScoring(score, scoringCache1, scoringCache2);
-        try {
-            score = unmarkedPatternCounterScoring(score, scoringCache1, scoringCache2);
-        } catch (Exception ex) {
-
-        }
+        score = unmarkedPatternCounterScoring(score, scoringCache1, scoringCache2);
-        try {
-            score = markedContiniousCounterScoring(score, scoringCache1, scoringCache2);
-        } catch (Exception ex) {
-
-        }
+        score = markedContiniousCounterScoring(score, scoringCache1, scoringCache2);
-        try {
-            score = strTokensMapScoring(score, scoringCache1, scoringCache2);
-        } catch (Exception ex) {
-
-        }
+        score = strTokensMapScoring(score, scoringCache1, scoringCache2);
-        try {
-            score = strTokenEntryScoring(score, scoringCache1, scoringCache2);
-        } catch (Exception ex) {
-
-        }
+        score = strTokenEntryScoring(score, scoringCache1, scoringCache2);
-        try {
-            score = strTokenMapTagsScoring(score, scoringCache1, scoringCache2);
-        } catch (Exception ex) {
-
-        }
+        score = strTokenMapTagsScoring(score, scoringCache1, scoringCache2);
-        try {
-            score = tokenformSizeScoring(score, scoringCache1, scoringCache2);
-        } catch (Exception ex) {
-
-        }
+        score = tokenformSizeScoring(score, scoringCache1, scoringCache2);
-        try {
-            score = tokenStemmingMapScoring(score, scoringCache1, scoringCache2);
-        } catch (Exception ex) {
-
-        }
+        score = tokenStemmingMapScoring(score, scoringCache1, scoringCache2);
-        try {
-            score = inflectedCounterScoring(score, scoringCache1, scoringCache2);
-        } catch (Exception ex) {
-
-        }
+        score = inflectedCounterScoring(score, scoringCache1, scoringCache2);
-        try {
-            score = annotatorCountScoring(score, scoringCache1, scoringCache2);
-        } catch (Exception ex) {
-
-        }
+        score = annotatorCountScoring(score, scoringCache1, scoringCache2);
-        try {
-            score = tokensCounterScoring(score, scoringCache1, scoringCache2);
-        } catch (Exception ex) {
-
-        }
+        score = tokensCounterScoring(score, scoringCache1, scoringCache2);
-        try {
-            LevenshteinDistance leven = new LevenshteinDistance(str, str1);
-            double SentenceScoreDiff = leven.computeLevenshteinDistance();
-            SentenceScoreDiff *= 15;
-            score -= SentenceScoreDiff;
-        } catch (Exception ex) {
-
-        }
+        LevenshteinDistance leven = new LevenshteinDistance(str, str1);
+        double SentenceScoreDiff = leven.computeLevenshteinDistance();
+        SentenceScoreDiff *= 15;
+        score -= SentenceScoreDiff;
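This hunk strips the per-step try/catch wrappers: where the old code swallowed any exception from a scoring pass and silently moved on, the new code calls the passes sequentially, so a single failure now aborts the whole comparison. If the old skip-on-failure behavior is ever wanted back without the boilerplate, a loop over functional steps does it; ScoreStep here is a hypothetical interface, not part of the codebase:

    interface ScoreStep {
        Double apply(Double score, SentimentValueCache c1, SentimentValueCache c2) throws Exception;
    }

    static Double runCascade(Double score, SentimentValueCache c1, SentimentValueCache c2,
                             java.util.List<ScoreStep> steps) {
        for (ScoreStep step : steps) {
            try {
                score = step.apply(score, c1, c2);
            } catch (Exception ex) {
                // keep the last good score and continue with the next pass
            }
        }
        return score;
    }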
         try {
             if (cacheSentiment1 == null) {
                 cacheSentimentLocal1 = setupNEREntitiesAndTokenTags(pipelineCoreDcoument1, cacheSentimentLocal1);
@@ -1471,7 +1446,8 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
             score = nerEntitiesAndTokenScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
                     ? cacheSentimentLocal2 : cacheSentiment2);
         } catch (Exception ex) {
-
+            System.out.println("ex 11: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
+                    + ex.getCause());
         }
         try {
             if (cacheSentiment1 == null) {
@@ -1483,24 +1459,17 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
             score = stopWordTokenLemmaScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
                     ? cacheSentimentLocal2 : cacheSentiment2);
         } catch (Exception ex) {

         }
-        try {
-            score = stopwordTokenPairCounterScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
-                    ? cacheSentimentLocal2 : cacheSentiment2);
-        } catch (Exception ex) {
-
-            System.out.println("ex 12: " + ex.getMessage() + " " + ex.getLocalizedMessage() + " "
-                    + ex.getCause());
-        }
+        score = stopwordTokenPairCounterScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
+                ? cacheSentimentLocal2 : cacheSentiment2);
         smxParam.setDistance(score);
-        try {
-            if (cacheSentiment1 == null) {
-                smxParam.setCacheValue1(cacheSentimentLocal1);
-            }
-            if (cacheSentiment2 == null) {
-                smxParam.setCacheValue2(cacheSentimentLocal2);
-            }
-        } catch (Exception ex) {
-
-        }
+        if (cacheSentiment1 == null) {
+            smxParam.setCacheValue1(cacheSentimentLocal1);
+        }
+        if (cacheSentiment2 == null) {
+            smxParam.setCacheValue2(cacheSentimentLocal2);
+        }
         return smxParam;
     }

@@ -14,28 +14,85 @@ screen -X -S (number1) quit
 */
package PresentationLayer;

import DataLayer.settings;
import FunctionLayer.CustomError;
import FunctionLayer.Datahandler;
import FunctionLayer.DoStuff;
import FunctionLayer.PipelineJMWESingleton;
import com.sun.tools.javac.util.List;
import discord4j.common.util.Snowflake;
import discord4j.core.DiscordClient;
import discord4j.core.GatewayDiscordClient;
import discord4j.core.event.domain.message.MessageCreateEvent;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.net.*;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Timer;
import java.util.TimerTask;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 *
 * @author install1
 */
public class DiscordHandler {

    private static void receiveAndSendPacket(DatagramSocket serverSocket, InetAddress ipAddress, int port) throws IOException, CustomError {
        byte[] receiveData = new byte[4096];
        DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
        try {
            serverSocket.receive(receivePacket);
        } catch (IOException e) {
            e.printStackTrace();
        }
        String sentence = new String(receivePacket.getData(), 0,
                receivePacket.getLength());
        sentence = sentence.replace("clientmessage:", "");
        String getResponseMsg = Datahandler.instance.getResponseMsg(sentence);
        byte[] sendData = getResponseMsg.getBytes("UTF-8");
        int deliver_port = 0;
        switch (port) {
            case 48470:
                deliver_port = 48479;
                break;
            case 48471:
                deliver_port = 48482;
                break;
            case 48472:
                deliver_port = 48476;
                break;
            case 48473:
                deliver_port = 48481;
                break;
        }
        DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, ipAddress, deliver_port);
        serverSocket.send(sendPacket);
        //System.out.println("receiveAndSendPacket send message to port: " + deliver_port);
    }
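The switch maps each listening port to a fixed reply port; a packet arriving on any other port leaves deliver_port at 0, so the reply would be sent to port 0. A table-driven sketch that makes the mapping and a fallback explicit (the Map and the fallback-to-sender choice are assumptions, not the commit's behavior):

    import java.util.Map;

    // listening port -> reply port, same pairs as the switch above
    static final Map<Integer, Integer> REPLY_PORTS = Map.of(
            48470, 48479,
            48471, 48482,
            48472, 48476,
            48473, 48481);

    static int replyPortFor(int port, DatagramPacket received) {
        // fall back to the sender's own port instead of an invalid 0
        return REPLY_PORTS.getOrDefault(port, received.getPort());
    }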

    public static void handleUDPTraffic(int port) {
        try (DatagramSocket serverSocket = new DatagramSocket(port)) {
            String hostIP = "";
            if (port == 48473 || port == 48471) {
                hostIP = "51.15.159.31";
            } else {
                hostIP = "195.154.53.196";
            }
            InetAddress ipAddress = InetAddress.getByName(hostIP); // used IP
            while (true) {
                receiveAndSendPacket(serverSocket, ipAddress, port);
            }
        } catch (SocketException | UnknownHostException e) {
            e.printStackTrace();
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        } catch (CustomError customError) {
            customError.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        try {
            Datahandler.instance.initiateMYSQL();
@@ -56,9 +113,15 @@ public class DiscordHandler {
        final DiscordClient client = DiscordClient.create(token);
        final GatewayDiscordClient gateway = client.login().block();
        String usernameBot = gateway.getSelf().block().getUsername();
        new Thread(() -> {
            Datahandler.instance.update_autismo_socket_msg();
        }).start();
        int autismbotCount = 4;
        // make sure not to use ports that are already occupied.
        for (int i = 0; i < autismbotCount; i++) {
            final int j = i;
            new Thread(() -> {
                List<Integer> ports = List.of(48470, 48471, 48472, 48473);
                handleUDPTraffic(ports.get(j));
            }).start();
        }
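Each UDP listener gets a raw Thread here, and note that List.of resolves to com.sun.tools.javac.util.List because of the import above; it happens to offer of(...) factories too, but it drags a compiler-internal class into the bot where java.util.List would do. A sketch of the same fan-out on a bounded pool, assuming java.util.List instead:

    import java.util.List;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    // one blocking handleUDPTraffic loop per port, on a fixed-size pool
    List<Integer> ports = List.of(48470, 48471, 48472, 48473);
    ExecutorService pool = Executors.newFixedThreadPool(ports.size());
    for (int port : ports) {
        pool.submit(() -> handleUDPTraffic(port));
    }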
        gateway.on(MessageCreateEvent.class).subscribe(event -> {
            if (!FunctionLayer.DoStuff.isOccupied()) {
                FunctionLayer.DoStuff.doStuff(event, usernameBot);