Reduced threads for secondary processing and added a thread pool for UDP messages. Furthermore, moved logic around so response concatenation can be evaluated

This commit is contained in:
christian 2021-12-07 22:26:19 +01:00
parent e9969f836e
commit e234ce0109
3 changed files with 298 additions and 275 deletions

View File

@ -89,7 +89,7 @@ public class DataMapper {
ResultSet l_rsSearch = null; ResultSet l_rsSearch = null;
String CountSQL = "select count(*) from Sentences"; String CountSQL = "select count(*) from Sentences";
String l_sSQL = "delete from Sentences\n" + String l_sSQL = "delete from Sentences\n" +
" where DATE(last_used) < DATE_SUB(CURDATE(), INTERVAL 32 DAY)\n" + " where DATE(last_used) < DATE_SUB(CURDATE(), INTERVAL 10 DAY)\n" +
" order by last_used asc limit 2"; " order by last_used asc limit 2";
try { try {
l_cCon = DBCPDataSource.getConnection(); l_cCon = DBCPDataSource.getConnection();

View File

@ -6,7 +6,6 @@ import edu.mit.jmwe.data.IMWE;
import edu.mit.jmwe.data.IToken; import edu.mit.jmwe.data.IToken;
import edu.stanford.nlp.ie.AbstractSequenceClassifier; import edu.stanford.nlp.ie.AbstractSequenceClassifier;
import edu.stanford.nlp.ie.crf.CRFClassifier; import edu.stanford.nlp.ie.crf.CRFClassifier;
import edu.stanford.nlp.ie.machinereading.structure.EntityMention;
import edu.stanford.nlp.ling.CoreAnnotations; import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.ling.CoreLabel; import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.ling.TaggedWord; import edu.stanford.nlp.ling.TaggedWord;
@ -18,7 +17,6 @@ import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.tagger.maxent.MaxentTagger; import edu.stanford.nlp.tagger.maxent.MaxentTagger;
import edu.stanford.nlp.trees.*; import edu.stanford.nlp.trees.*;
import edu.stanford.nlp.util.CoreMap; import edu.stanford.nlp.util.CoreMap;
import kotlinx.coroutines.*;
import org.ejml.simple.SimpleMatrix; import org.ejml.simple.SimpleMatrix;
import java.util.*; import java.util.*;
@ -202,91 +200,21 @@ public class Datahandler {
} }
} }
public String getResponseFutures(String strF, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) { private SentimentAnalyzerTest getReponseFuturesHelper(String strF, String str1, StanfordCoreNLP stanfordCoreNLP,
StanfordCoreNLP stanfordCoreNLPSentiment,
Annotation strAnno = new Annotation(strF); List<CoreMap> coreMaps1, Annotation strAnno,
strAnno.compact(); Annotation strAnnoSentiment, CoreDocument coreDocument
stanfordCoreNLP.annotate(strAnno); , Integer tokenizeCountingF, List<List<TaggedWord>> taggedWordListF, ArrayList<TypedDependency> typedDependenciesF
, ArrayList<Integer> rnnCoreAnnotationsPredictedF, ArrayList<SimpleMatrix> simpleMatricesF
Annotation strAnnoSentiment = new Annotation(strF); , ArrayList<SimpleMatrix> simpleMatricesNodevectorsF, List<String> listF, Integer longestF, List<CoreMap> sentencesF
strAnnoSentiment.compact(); , List<CoreMap> sentencesSentimentF, ArrayList<Tree> treesF, ArrayList<GrammaticalStructure> grammaticalStructuresF
stanfordCoreNLPSentiment.annotate(strAnnoSentiment); , Integer sentimentLongestF, List<IMWE<IToken>> imwesF, Integer inflectedCounterNegativeF, Integer inflectedCounterPositiveF
, ArrayList<String> tokenEntryF, Integer unmarkedPatternCounterF, ArrayList<String> strTokensIpartFormF, ArrayList<String> tokenFormsF
Annotation annotation = new Annotation(strF); , ArrayList<Integer> intTokenEntyCountsF, Integer markedContinuousCounterF, ArrayList<String> ITokenTagsF
stanfordCoreNLP.annotate(annotation); , ArrayList<String> strTokenEntryGetPOSF, ArrayList<String> retrieveTGWListF, Integer pairCounterF
CoreDocument coreDocument = new CoreDocument(annotation); , Integer tokensCounterF, ArrayList<String> stopWordLemmaF, ArrayList<String> nerEntitiesF
, ArrayList<String> stopWordTokenF, ArrayList<String> entityTokenTagsF, ArrayList<String> nerEntitiesTypeF
List<String> ues_copy = new ArrayList(DataMapper.getAllStrings()); , Integer anotatorcounterF, ArrayList<String> strTokenStemsF) {
double preRelationUserCounters = -155000.0;
ArrayList<String> concurrentRelations = new ArrayList();
StringBuilder SB = new StringBuilder();
Annotation jmweAnnotationF = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strF);
Integer tokenizeCountingF = null;
List<List<TaggedWord>> taggedWordListF = null;
java.util.ArrayList<String> retrieveTGWListF = null;
List<CoreMap> sentencesF = null;
List<CoreMap> sentencesSentimentF = null;
List<CoreMap> coreMaps1 = jmweAnnotationF.get(CoreAnnotations.SentencesAnnotation.class);
java.util.ArrayList<Tree> treesF = null;
ArrayList<GrammaticalStructure> grammaticalStructuresF = null;
java.util.ArrayList<TypedDependency> typedDependenciesF = null;
java.util.ArrayList<Integer> rnnCoreAnnotationsPredictedF = null;
java.util.ArrayList<SimpleMatrix> simpleMatricesF = null;
java.util.ArrayList<SimpleMatrix> simpleMatricesNodevectorsF = null;
List<String> listF = null;
Integer longestF = null;
Integer sentimentLongestF = null;
List<IMWE<IToken>> imwesF = null;
Integer InflectedCounterNegativeF = null;
Integer InflectedCounterPositiveF = null;
ArrayList<String> tokenEntryF = null;
Integer MarkedContinuousCounterF = null;
Integer UnmarkedPatternCounterF = null;
ArrayList<String> strTokensIpartFormF = null;
java.util.ArrayList<String> tokenFormsF = null;
ArrayList<String> strTokenEntryGetPOSF = null;
java.util.ArrayList<Integer> intTokenEntyCountsF = null;
ArrayList<String> ITokenTagsF = null;
java.util.ArrayList<String> strTokenStemsF = null;
Integer AnotatorcounterF = null;
Integer TokensCounterF = null;
java.util.ArrayList<String> entityTokenTagsF = null;
java.util.ArrayList<String> nerEntitiesF = null;
java.util.ArrayList<String> nerEntitiesTypeF = null;
java.util.ArrayList<String> stopWordTokenF = null;
java.util.ArrayList<String> stopWordLemmaF = null;
Integer PairCounterF = null;
for (String str1 : ues_copy) {
if (strF != str1) {
Annotation annotation2 = pipelineSentimentAnnotationCache.getOrDefault(str1, null); Annotation annotation2 = pipelineSentimentAnnotationCache.getOrDefault(str1, null);
Annotation annotation4 = pipelineAnnotationCache.getOrDefault(str1, null); Annotation annotation4 = pipelineAnnotationCache.getOrDefault(str1, null);
CoreDocument coreDocument1 = coreDocumentAnnotationCache.getOrDefault(str1, null); CoreDocument coreDocument1 = coreDocumentAnnotationCache.getOrDefault(str1, null);
@ -359,16 +287,16 @@ public class Datahandler {
typedDependencies1, rnnCoreAnnotationsPredictedF, rnnCoreAnnotationsPredicted1, typedDependencies1, rnnCoreAnnotationsPredictedF, rnnCoreAnnotationsPredicted1,
simpleMatricesF, simpleMatrices1, simpleMatricesNodevectorsF, simpleMatricesNodevectors1, simpleMatricesF, simpleMatrices1, simpleMatricesNodevectorsF, simpleMatricesNodevectors1,
listF, list1, longestF, longest1, sentimentLongestF, sentimentLongest1, imwesF, listF, list1, longestF, longest1, sentimentLongestF, sentimentLongest1, imwesF,
imwes1, InflectedCounterNegativeF, InflectedCounterNegative1, InflectedCounterPositiveF, imwes1, inflectedCounterNegativeF, InflectedCounterNegative1, inflectedCounterPositiveF,
InflectedCounterPositive1, tokenEntryF, tokenEntry1, MarkedContinuousCounterF, InflectedCounterPositive1, tokenEntryF, tokenEntry1, markedContinuousCounterF,
MarkedContinuousCounter1, UnmarkedPatternCounterF, UnmarkedPatternCounter1, MarkedContinuousCounter1, unmarkedPatternCounterF, UnmarkedPatternCounter1,
strTokensIpartFormF, strTokensIpartForm1, tokenFormsF, tokenForms1, strTokensIpartFormF, strTokensIpartForm1, tokenFormsF, tokenForms1,
strTokenEntryGetPOSF, strTokenEntryGetPOS1, intTokenEntyCountsF, strTokenEntryGetPOSF, strTokenEntryGetPOS1, intTokenEntyCountsF,
intTokenEntyCounts1, ITokenTagsF, ITokenTags1, strTokenStemsF, strTokenStems1, intTokenEntyCounts1, ITokenTagsF, ITokenTags1, strTokenStemsF, strTokenStems1,
AnotatorcounterF, Anotatorcounter1, TokensCounterF, TokensCounter1, anotatorcounterF, Anotatorcounter1, tokensCounterF, TokensCounter1,
entityTokenTagsF, entityTokenTags1, nerEntitiesF, nerEntities1, nerEntitiesTypeF, entityTokenTagsF, entityTokenTags1, nerEntitiesF, nerEntities1, nerEntitiesTypeF,
nerEntitiesType1, stopWordTokenF, stopWordToken1, stopWordLemmaF, stopWordLemma1, nerEntitiesType1, stopWordTokenF, stopWordToken1, stopWordLemmaF, stopWordLemma1,
PairCounterF, PairCounter1 pairCounterF, PairCounter1
); );
if (tokenizeCounting == null) { if (tokenizeCounting == null) {
tokenizeCountingHashMap.put(str1, SMX.getTokenizeCounting()); tokenizeCountingHashMap.put(str1, SMX.getTokenizeCounting());
@ -376,203 +304,271 @@ public class Datahandler {
if (taggedWordList1 == null) { if (taggedWordList1 == null) {
taggedWordListHashMap.put(str1, SMX.getTaggedWordList1()); taggedWordListHashMap.put(str1, SMX.getTaggedWordList1());
} }
if (retrieveTGWList1 == null) {
retrieveTGWListHashMap.put(str1, SMX.getRetrieveTGWList1());
}
if (sentence1 == null) {
sentences1HashMap.put(str1, SMX.getSentences1());
}
if (sentenceSentiment1 == null) {
sentencesSentimentHashMap.put(str1, SMX.getSentencesSentiment1());
}
if (trees1 == null) {
trees1HashMap.put(str1, SMX.getTrees1());
}
if (grammaticalStructures1 == null) {
grammaticalStructureHashMap.put(str1, SMX.getGrammaticalStructures1());
}
if (typedDependencies1 == null) {
typedDependenciesHashMap.put(str1, SMX.getTypedDependencies1());
}
if (rnnCoreAnnotationsPredicted1 == null) {
rnnCoreAnnotationsPredictedHashMap.put(str1, SMX.getRnnCoreAnnotationsPredicted1());
}
if (simpleMatrices1 == null) {
simpleMatricesHashMap.put(str1, SMX.getSimpleMatrices1());
}
if (simpleMatricesNodevectors1 == null) {
simpleMatricesNodevectorsHashMap.put(str1, SMX.getSimpleMatricesNodevectors1());
}
if (list1 == null) {
listHashMap.put(str1, SMX.getList1());
}
if (longest1 == null) {
longestHashMap.put(str1, SMX.getLongest1());
}
if (sentimentLongest1 == null) {
sentimentHashMap.put(str1, SMX.getSentimentLongest1());
}
if (imwes1 == null) {
imwesHashMap.put(str1, SMX.getImwes1());
}
if (InflectedCounterNegative1 == null) {
InflectedCounterNegativeHashMap.put(str1, SMX.getInflectedCounterNegative1());
}
if (InflectedCounterPositive1 == null) {
InflectedCounterPositiveHashMap.put(str1, SMX.getInflectedCounterPositive1());
}
if (tokenEntry1 == null) {
tokenEntryHashMap.put(str1, SMX.getTokenEntry1());
}
if (MarkedContinuousCounter1 == null) {
MarkedContinuousCounterHashMap.put(str1, SMX.getMarkedContinuousCounter1());
}
if (UnmarkedPatternCounter1 == null) {
UnmarkedPatternCounterHashMap.put(str1, SMX.getUnmarkedPatternCounter1());
}
if (strTokensIpartForm1 == null) {
strTokensIpartFormHashMap.put(str1, SMX.getStrTokensIpartForm1());
}
if (tokenForms1 == null) {
tokenFormsHashMap.put(str1, SMX.getTokenForms1());
}
if (strTokenEntryGetPOS1 == null) {
strTokenEntryGetPOSHashMap.put(str1, SMX.getStrTokenEntryGetPOS1());
}
if (intTokenEntyCounts1 == null) {
intTokenEntyCountsHashMap.put(str1, SMX.getIntTokenEntyCounts1());
}
if (ITokenTags1 == null) {
ITokenTagsHashMap.put(str1, SMX.getITokenTags1());
}
if (strTokenStems1 == null) {
strTokenStemsHashMap.put(str1, SMX.getStrTokenStems1());
}
if (Anotatorcounter1 == null) {
AnotatorcounterHashMap.put(str1, SMX.getAnotatorcounter1());
}
if (TokensCounter1 == null) {
TokensCounterHashMap.put(str1, SMX.getTokensCounter1());
}
if (entityTokenTags1 == null) {
entityTokenTagsHashMap.put(str1, SMX.getEntityTokenTags1());
}
if (nerEntities1 == null) {
nerEntitiesHashMap.put(str1, SMX.getNerEntities1());
}
if (nerEntitiesType1 == null) {
nerEntitiesTypeHashMap.put(str1, SMX.getNerEntitiesType1());
}
if (stopWordToken1 == null) {
stopWordTokenHashMap.put(str1, SMX.getStopWordToken1());
}
if (stopWordLemma1 == null) {
stopWordLemmaHashMap.put(str1, SMX.getStopWordLemma1());
}
if (PairCounter1 == null) {
PairCounterHashMap.put(str1, SMX.getPairCounter1());
}
return SMX;
}
public String getResponseFutures(String strF, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) {
Annotation strAnno = new Annotation(strF);
strAnno.compact();
stanfordCoreNLP.annotate(strAnno);
Annotation strAnnoSentiment = new Annotation(strF);
strAnnoSentiment.compact();
stanfordCoreNLPSentiment.annotate(strAnnoSentiment);
Annotation annotation = new Annotation(strF);
stanfordCoreNLP.annotate(annotation);
CoreDocument coreDocument = new CoreDocument(annotation);
Annotation jmweAnnotationF = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strF);
List<CoreMap> coreMaps1 = jmweAnnotationF.get(CoreAnnotations.SentencesAnnotation.class);
Integer tokenizeCountingF = null;
List<List<TaggedWord>> taggedWordListF = null;
java.util.ArrayList<String> retrieveTGWListF = null;
List<CoreMap> sentencesF = null;
List<CoreMap> sentencesSentimentF = null;
java.util.ArrayList<Tree> treesF = null;
ArrayList<GrammaticalStructure> grammaticalStructuresF = null;
java.util.ArrayList<TypedDependency> typedDependenciesF = null;
java.util.ArrayList<Integer> rnnCoreAnnotationsPredictedF = null;
java.util.ArrayList<SimpleMatrix> simpleMatricesF = null;
java.util.ArrayList<SimpleMatrix> simpleMatricesNodevectorsF = null;
List<String> listF = null;
Integer longestF = null;
Integer sentimentLongestF = null;
List<IMWE<IToken>> imwesF = null;
Integer InflectedCounterNegativeF = null;
Integer InflectedCounterPositiveF = null;
ArrayList<String> tokenEntryF = null;
Integer MarkedContinuousCounterF = null;
Integer UnmarkedPatternCounterF = null;
ArrayList<String> strTokensIpartFormF = null;
java.util.ArrayList<String> tokenFormsF = null;
ArrayList<String> strTokenEntryGetPOSF = null;
java.util.ArrayList<Integer> intTokenEntyCountsF = null;
ArrayList<String> ITokenTagsF = null;
java.util.ArrayList<String> strTokenStemsF = null;
Integer AnotatorcounterF = null;
Integer TokensCounterF = null;
java.util.ArrayList<String> entityTokenTagsF = null;
java.util.ArrayList<String> nerEntitiesF = null;
java.util.ArrayList<String> nerEntitiesTypeF = null;
java.util.ArrayList<String> stopWordTokenF = null;
java.util.ArrayList<String> stopWordLemmaF = null;
Integer PairCounterF = null;
ArrayList<String> concurrentRelations = new ArrayList();
StringBuilder SB = new StringBuilder();
List<String> ues_copy = new ArrayList(DataMapper.getAllStrings());
double preRelationUserCounters = -155000.0;
for (String str1 : ues_copy) {
if (strF != str1) {
SentimentAnalyzerTest SMX = getReponseFuturesHelper(strF, str1, stanfordCoreNLP, stanfordCoreNLPSentiment,
coreMaps1, strAnno, strAnnoSentiment, coreDocument, tokenizeCountingF, taggedWordListF
, typedDependenciesF, rnnCoreAnnotationsPredictedF, simpleMatricesF, simpleMatricesNodevectorsF
, listF, longestF, sentencesF, sentencesSentimentF, treesF, grammaticalStructuresF, sentimentLongestF
, imwesF, InflectedCounterNegativeF, InflectedCounterPositiveF, tokenEntryF, UnmarkedPatternCounterF
, strTokensIpartFormF, tokenFormsF, intTokenEntyCountsF, MarkedContinuousCounterF, ITokenTagsF
, strTokenEntryGetPOSF, retrieveTGWListF, PairCounterF, TokensCounterF, stopWordLemmaF, nerEntitiesF
, stopWordTokenF, entityTokenTagsF, nerEntitiesTypeF, AnotatorcounterF, strTokenStemsF);
if (tokenizeCountingF == null) { if (tokenizeCountingF == null) {
tokenizeCountingF = SMX.getTokenizeCountingF(); tokenizeCountingF = SMX.getTokenizeCountingF();
} }
if (taggedWordListF == null) { if (taggedWordListF == null) {
taggedWordListF = SMX.getTaggedWordListF(); taggedWordListF = SMX.getTaggedWordListF();
} }
if (retrieveTGWListF == null) {
retrieveTGWListF = SMX.getRetrieveTGWListF();
}
if (retrieveTGWList1 == null) {
retrieveTGWListHashMap.put(str1, SMX.getRetrieveTGWList1());
}
if (sentencesF == null) {
sentencesF = SMX.getSentencesF();
}
if (sentence1 == null) {
sentences1HashMap.put(str1, SMX.getSentences1());
}
if (sentencesSentimentF == null) {
sentencesSentimentF = SMX.getSentencesSentimentF();
}
if (sentenceSentiment1 == null) {
sentencesSentimentHashMap.put(str1, SMX.getSentencesSentiment1());
}
if (treesF == null) {
treesF = SMX.getTreesF();
}
if (trees1 == null) {
trees1HashMap.put(str1, SMX.getTrees1());
}
if (grammaticalStructuresF == null) {
grammaticalStructuresF = SMX.getGrammaticalStructuresF();
}
if (grammaticalStructures1 == null) {
grammaticalStructureHashMap.put(str1, SMX.getGrammaticalStructures1());
}
if (typedDependenciesF == null) { if (typedDependenciesF == null) {
typedDependenciesF = SMX.getTypedDependenciesF(); typedDependenciesF = SMX.getTypedDependenciesF();
} }
if (typedDependencies1 == null) {
typedDependenciesHashMap.put(str1, SMX.getTypedDependencies1());
}
if (rnnCoreAnnotationsPredictedF == null) { if (rnnCoreAnnotationsPredictedF == null) {
rnnCoreAnnotationsPredictedF = SMX.getRnnCoreAnnotationsPredictedF(); rnnCoreAnnotationsPredictedF = SMX.getRnnCoreAnnotationsPredictedF();
} }
if (rnnCoreAnnotationsPredicted1 == null) {
rnnCoreAnnotationsPredictedHashMap.put(str1, SMX.getRnnCoreAnnotationsPredicted1());
}
if (simpleMatricesF == null) { if (simpleMatricesF == null) {
simpleMatricesF = SMX.getSimpleMatricesF(); simpleMatricesF = SMX.getSimpleMatricesF();
} }
if (simpleMatrices1 == null) {
simpleMatricesHashMap.put(str1, SMX.getSimpleMatrices1());
}
if (simpleMatricesNodevectorsF == null) { if (simpleMatricesNodevectorsF == null) {
simpleMatricesNodevectorsF = SMX.getSimpleMatricesNodevectorsF(); simpleMatricesNodevectorsF = SMX.getSimpleMatricesNodevectorsF();
} }
if (simpleMatricesNodevectors1 == null) {
simpleMatricesNodevectorsHashMap.put(str1, SMX.getSimpleMatricesNodevectors1());
}
if (listF == null) { if (listF == null) {
listF = SMX.getListF(); listF = SMX.getListF();
} }
if (list1 == null) {
listHashMap.put(str1, SMX.getList1());
}
if (longestF == null) { if (longestF == null) {
longestF = SMX.getLongestF(); longestF = SMX.getLongestF();
} }
if (longest1 == null) { if (sentencesF == null) {
longestHashMap.put(str1, SMX.getLongest1()); sentencesF = SMX.getSentencesF();
}
if (sentencesSentimentF == null) {
sentencesSentimentF = SMX.getSentencesSentimentF();
}
if (treesF == null) {
treesF = SMX.getTreesF();
}
if (grammaticalStructuresF == null) {
grammaticalStructuresF = SMX.getGrammaticalStructuresF();
} }
if (sentimentLongestF == null) { if (sentimentLongestF == null) {
sentimentLongestF = SMX.getSentimentLongestF(); sentimentLongestF = SMX.getSentimentLongestF();
} }
if (sentimentLongest1 == null) {
sentimentHashMap.put(str1, SMX.getSentimentLongest1());
}
if (imwesF == null) { if (imwesF == null) {
imwesF = SMX.getImwesF(); imwesF = SMX.getImwesF();
} }
if (imwes1 == null) {
imwesHashMap.put(str1, SMX.getImwes1());
}
if (InflectedCounterNegativeF == null) { if (InflectedCounterNegativeF == null) {
InflectedCounterNegativeF = SMX.getInflectedCounterNegativeF(); InflectedCounterNegativeF = SMX.getInflectedCounterNegativeF();
} }
if (InflectedCounterNegative1 == null) {
InflectedCounterNegativeHashMap.put(str1, SMX.getInflectedCounterNegative1());
}
if (InflectedCounterPositiveF == null) { if (InflectedCounterPositiveF == null) {
InflectedCounterPositiveF = SMX.getInflectedCounterPositiveF(); InflectedCounterPositiveF = SMX.getInflectedCounterPositiveF();
} }
if (InflectedCounterPositive1 == null) {
InflectedCounterPositiveHashMap.put(str1, SMX.getInflectedCounterPositive1());
}
if (tokenEntryF == null) { if (tokenEntryF == null) {
tokenEntryF = SMX.getTokenEntryF(); tokenEntryF = SMX.getTokenEntryF();
} }
if (tokenEntry1 == null) {
tokenEntryHashMap.put(str1, SMX.getTokenEntry1());
}
if (MarkedContinuousCounterF == null) {
MarkedContinuousCounterF = SMX.getMarkedContinuousCounterF();
}
if (MarkedContinuousCounter1 == null) {
MarkedContinuousCounterHashMap.put(str1, SMX.getMarkedContinuousCounter1());
}
if (UnmarkedPatternCounterF == null) { if (UnmarkedPatternCounterF == null) {
UnmarkedPatternCounterF = SMX.getUnmarkedPatternCounterF(); UnmarkedPatternCounterF = SMX.getUnmarkedPatternCounterF();
} }
if (UnmarkedPatternCounter1 == null) {
UnmarkedPatternCounterHashMap.put(str1, SMX.getUnmarkedPatternCounter1());
}
if (strTokensIpartFormF == null) { if (strTokensIpartFormF == null) {
strTokensIpartFormF = SMX.getStrTokensIpartFormF(); strTokensIpartFormF = SMX.getStrTokensIpartFormF();
} }
if (strTokensIpartForm1 == null) {
strTokensIpartFormHashMap.put(str1, SMX.getStrTokensIpartForm1());
}
if (tokenFormsF == null) { if (tokenFormsF == null) {
tokenFormsF = SMX.getTokenFormsF(); tokenFormsF = SMX.getTokenFormsF();
} }
if (tokenForms1 == null) {
tokenFormsHashMap.put(str1, SMX.getTokenForms1());
}
if (strTokenEntryGetPOSF == null) {
strTokenEntryGetPOSF = SMX.getStrTokenEntryGetPOSF();
}
if (strTokenEntryGetPOS1 == null) {
strTokenEntryGetPOSHashMap.put(str1, SMX.getStrTokenEntryGetPOS1());
}
if (intTokenEntyCountsF == null) { if (intTokenEntyCountsF == null) {
intTokenEntyCountsF = SMX.getIntTokenEntyCountsF(); intTokenEntyCountsF = SMX.getIntTokenEntyCountsF();
} }
if (intTokenEntyCounts1 == null) { if (MarkedContinuousCounterF == null) {
intTokenEntyCountsHashMap.put(str1, SMX.getIntTokenEntyCounts1()); MarkedContinuousCounterF = SMX.getMarkedContinuousCounterF();
} }
if (ITokenTagsF == null) { if (ITokenTagsF == null) {
ITokenTagsF = SMX.getITokenTagsF(); ITokenTagsF = SMX.getITokenTagsF();
} }
if (ITokenTags1 == null) { if (strTokenEntryGetPOSF == null) {
ITokenTagsHashMap.put(str1, SMX.getITokenTags1()); strTokenEntryGetPOSF = SMX.getStrTokenEntryGetPOSF();
} }
if (strTokenStemsF == null) { if (retrieveTGWListF == null) {
strTokenStemsF = SMX.getStrTokenStemsF(); retrieveTGWListF = SMX.getRetrieveTGWListF();
}
if (strTokenStems1 == null) {
strTokenStemsHashMap.put(str1, SMX.getStrTokenStems1());
}
if (AnotatorcounterF == null) {
AnotatorcounterF = SMX.getAnotatorcounterF();
}
if (Anotatorcounter1 == null) {
AnotatorcounterHashMap.put(str1, SMX.getAnotatorcounter1());
}
if (TokensCounterF == null) {
TokensCounterF = SMX.getTokensCounterF();
}
if (TokensCounter1 == null) {
TokensCounterHashMap.put(str1, SMX.getTokensCounter1());
}
if (entityTokenTagsF == null) {
entityTokenTagsF = SMX.getEntityTokenTagsF();
}
if (entityTokenTags1 == null) {
entityTokenTagsHashMap.put(str1, SMX.getEntityTokenTags1());
}
if (nerEntitiesF == null) {
nerEntitiesF = SMX.getNerEntitiesF();
}
if (nerEntities1 == null) {
nerEntitiesHashMap.put(str1, SMX.getNerEntities1());
}
if (nerEntitiesTypeF == null) {
nerEntitiesTypeF = SMX.getNerEntitiesTypeF();
}
if (nerEntitiesType1 == null) {
nerEntitiesTypeHashMap.put(str1, SMX.getNerEntitiesType1());
}
if (stopWordTokenF == null) {
stopWordTokenF = SMX.getStopWordTokenF();
}
if (stopWordToken1 == null) {
stopWordTokenHashMap.put(str1, SMX.getStopWordToken1());
}
if (stopWordLemmaF == null) {
stopWordLemmaF = SMX.getStopWordLemmaF();
}
if (stopWordLemma1 == null) {
stopWordLemmaHashMap.put(str1, SMX.getStopWordLemma1());
} }
if (PairCounterF == null) { if (PairCounterF == null) {
PairCounterF = SMX.getPairCounterF(); PairCounterF = SMX.getPairCounterF();
} }
if (PairCounter1 == null) { if (TokensCounterF == null) {
PairCounterHashMap.put(str1, SMX.getPairCounter1()); TokensCounterF = SMX.getTokensCounterF();
}
if (stopWordLemmaF == null) {
stopWordLemmaF = SMX.getStopWordLemmaF();
}
if (nerEntitiesF == null) {
nerEntitiesF = SMX.getNerEntitiesF();
}
if (stopWordTokenF == null) {
stopWordTokenF = SMX.getStopWordTokenF();
}
if (entityTokenTagsF == null) {
entityTokenTagsF = SMX.getEntityTokenTagsF();
}
if (nerEntitiesTypeF == null) {
nerEntitiesTypeF = SMX.getNerEntitiesTypeF();
}
if (AnotatorcounterF == null) {
AnotatorcounterF = SMX.getAnotatorcounterF();
}
if (strTokenStemsF == null) {
strTokenStemsF = SMX.getStrTokenStemsF();
} }
SimilarityMatrix getSMX = SMX.callSMX(); SimilarityMatrix getSMX = SMX.callSMX();
@ -583,19 +579,41 @@ public class Datahandler {
} }
} }
} }
int cacheRequirement = 6500; int cacheRequirement = 8500;
if (preRelationUserCounters > cacheRequirement && !ues_copy.contains(strF) && filterContent(strF)) { if (preRelationUserCounters > cacheRequirement && !ues_copy.contains(strF) && filterContent(strF)) {
DataMapper.InsertMYSQLStrings(strF); DataMapper.InsertMYSQLStrings(strF);
DataMapper.checkStringsToDelete(); DataMapper.checkStringsToDelete();
} }
double randomLenghtPermit = strF.length() * (Math.random() * Math.random() * Math.random() * (Math.random() * 10)); double randomLenghtPermit = strF.length() * (Math.random() * Math.random() * (Math.random() * 10));
Collections.reverse(concurrentRelations); Collections.reverse(concurrentRelations);
ArrayList<String> mysqlUpdateLastUsed = new ArrayList(); ArrayList<String> mysqlUpdateLastUsed = new ArrayList();
if (!concurrentRelations.isEmpty()) { if (!concurrentRelations.isEmpty()) {
boolean passedFirst = false;
int lastIter = 1;
for (String secondaryRelation : concurrentRelations) { for (String secondaryRelation : concurrentRelations) {
if (SB.toString().length() > randomLenghtPermit && !SB.toString().isEmpty()) { if (SB.toString().length() > randomLenghtPermit && !SB.toString().isEmpty()) {
break; break;
} }
if (passedFirst && lastIter < concurrentRelations.size()) {
String testSTR = SB.toString() + " " + secondaryRelation;
SentimentAnalyzerTest SMX = getReponseFuturesHelper(strF, testSTR, stanfordCoreNLP, stanfordCoreNLPSentiment,
coreMaps1, strAnno, strAnnoSentiment, coreDocument, tokenizeCountingF, taggedWordListF
, typedDependenciesF, rnnCoreAnnotationsPredictedF, simpleMatricesF, simpleMatricesNodevectorsF
, listF, longestF, sentencesF, sentencesSentimentF, treesF, grammaticalStructuresF, sentimentLongestF
, imwesF, InflectedCounterNegativeF, InflectedCounterPositiveF, tokenEntryF, UnmarkedPatternCounterF
, strTokensIpartFormF, tokenFormsF, intTokenEntyCountsF, MarkedContinuousCounterF, ITokenTagsF
, strTokenEntryGetPOSF, retrieveTGWListF, PairCounterF, TokensCounterF, stopWordLemmaF, nerEntitiesF
, stopWordTokenF, entityTokenTagsF, nerEntitiesTypeF, AnotatorcounterF, strTokenStemsF);
SimilarityMatrix getSMX = SMX.callSMX();
double scoreRelationLastUserMsg = getSMX.getDistance();
if (preRelationUserCounters > scoreRelationLastUserMsg) {
break;
}
}
passedFirst = true;
lastIter++;
SB.append(secondaryRelation).append(" "); SB.append(secondaryRelation).append(" ");
mysqlUpdateLastUsed.add(secondaryRelation); mysqlUpdateLastUsed.add(secondaryRelation);
} }

View File

@ -89,7 +89,8 @@ public class DiscordHandler extends ListenerAdapter {
private static StanfordCoreNLP stanfordCoreNLP; private static StanfordCoreNLP stanfordCoreNLP;
private static Datahandler datahandler; private static Datahandler datahandler;
private static StanfordCoreNLP stanfordCoreNLPSentiment; private static StanfordCoreNLP stanfordCoreNLPSentiment;
private static ExecutorService executorService = Executors.newFixedThreadPool(2); private static ExecutorService executorService = Executors.newFixedThreadPool(1);
private static ExecutorService executorServiceIngame = Executors.newFixedThreadPool(1);
//TODO add python program that edits the java code. python program just adds test if statements on //TODO add python program that edits the java code. python program just adds test if statements on
//variables until the tests pass //variables until the tests pass
@ -113,14 +114,17 @@ public class DiscordHandler extends ListenerAdapter {
//make sure not to use ports that are already occupied. //make sure not to use ports that are already occupied.
for (int i = 0; i < autismbotCount; i++) { for (int i = 0; i < autismbotCount; i++) {
final int j = i; final int j = i;
new Thread(() -> { executorServiceIngame.execute(new Runnable() {
@Override
public void run() {
ArrayList<Integer> ports = new ArrayList<Integer>(); ArrayList<Integer> ports = new ArrayList<Integer>();
ports.add(48475); ports.add(48475);
ports.add(48476); ports.add(48476);
ports.add(48477); ports.add(48477);
ports.add(48478); ports.add(48478);
handleUDPTraffic(ports.get(j), datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment); handleUDPTraffic(ports.get(j), datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
}).start(); }
});
} }
} }
@ -134,7 +138,8 @@ public class DiscordHandler extends ListenerAdapter {
content = content.replace(member.getId(), ""); content = content.replace(member.getId(), "");
} }
} }
if (username != null && !event.getAuthor().isBot() && !content.isEmpty()) { if (username != null && !event.getAuthor().isBot() && !content.isEmpty()
&& event.getMessage().getCategory() != null) {
String channelName = event.getMessage().getChannel().getName().toLowerCase(); String channelName = event.getMessage().getChannel().getName().toLowerCase();
boolean channelpermissionsDenied = false; boolean channelpermissionsDenied = false;
if (channelName.contains("suggestion-box")) { if (channelName.contains("suggestion-box")) {