update to using futures again for quicker processing

jenz 2022-10-23 17:40:47 +02:00
parent 0cf47b8e6c
commit eb7ed76d2e
8 changed files with 414 additions and 239 deletions
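
The core of the change is in Datahandler: the per-candidate scoring that previously ran inline in getResponseFutures is wrapped in a Callable (the new get_res inner class), submitted to an ExecutorCompletionService backed by a fixed thread pool, and the results are drained with a polling loop that keeps the best-scoring candidate. The sketch below shows that submit/poll pattern in isolation; CompletionServiceSketch, the Scored class, and the length-based toy score are illustrative stand-ins for the project's get_res and SentimentAnalyzerTest types, not their actual signatures.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class CompletionServiceSketch {

    // Stand-in for the per-comparison result (the real code works with SentimentAnalyzerTest).
    static final class Scored {
        final String candidate;
        final double score;

        Scored(String candidate, double score) {
            this.candidate = candidate;
            this.score = score;
        }
    }

    public static void main(String[] args) throws InterruptedException, ExecutionException {
        ExecutorService pool = Executors.newFixedThreadPool(7);
        CompletionService<Scored> completionService = new ExecutorCompletionService<>(pool);

        List<String> candidates = Arrays.asList("short", "a bit longer", "the longest candidate of all");
        List<Future<Scored>> futures = new ArrayList<>();
        for (final String candidate : candidates) {
            // Each comparison becomes its own task; submit() returns immediately with a Future.
            futures.add(completionService.submit(() -> new Scored(candidate, candidate.length())));
        }

        // Drain results as they complete, keeping the best score seen so far.
        double best = Double.NEGATIVE_INFINITY;
        String bestCandidate = null;
        int pending = futures.size();
        while (pending > 0) {
            Future<Scored> completed = completionService.poll(100, TimeUnit.MILLISECONDS);
            if (completed == null) {
                continue; // nothing finished within the timeout, keep polling
            }
            --pending;
            Scored result = completed.get();
            if (result.score > best) {
                best = result.score;
                bestCandidate = result.candidate;
            }
        }
        pool.shutdown();
        System.out.println("best candidate: " + bestCandidate + " (score " + best + ")");
    }
}

Draining through the CompletionService means results are consumed in completion order rather than submission order, so one slow comparison does not hold up the ones that finish earlier.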

View File

@@ -93,7 +93,7 @@ public class DataMapper {
ResultSet resultSet = l_pStatement.executeQuery();
if (resultSet.next()) {
int count = resultSet.getInt(1);
if (count > 7000) {
if (count > 35000) {
//System.out.println("cleaning strings: " + l_sSQL);
l_pStatement = l_cCon.prepareStatement(l_sSQL);
l_pStatement.executeUpdate();

View File

@@ -0,0 +1,31 @@
package DataLayer;
import FunctionLayer.Datahandler;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import discord4j.core.event.domain.lifecycle.ReadyEvent;
import discord4j.core.event.domain.message.MessageCreateEvent;
import discord4j.core.object.entity.Message;
import net.dv8tion.jda.api.events.message.MessageReceivedEvent;
import java.io.IOException;
import java.net.*;
public class RunnerClient {
public RunnerClient(String contentF, boolean mentionedBot, String channelName, Datahandler datahandler,
StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment,
MessageReceivedEvent event, String username) {
if (mentionedBot || channelName.contains("general-autism")) {
String ResponseStr = datahandler.getResponseMsg(contentF, username,
stanfordCoreNLP, stanfordCoreNLPSentiment,
false);
if (!ResponseStr.isEmpty()) {
System.out.print("\nResponseStr3: " + ResponseStr + "\n");
event.getMessage().getChannel().sendMessage(ResponseStr).queue();
}
} else {
String strF = datahandler.trimString(contentF);
datahandler.getResponseFutures(strF, stanfordCoreNLP, stanfordCoreNLPSentiment);
}
}
}

View File

@@ -0,0 +1,80 @@
package DataLayer;
import FunctionLayer.Datahandler;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.*;
public class ThreadClient {
public ThreadClient(int port, Datahandler datahandler, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) {
String hostIP = new settings().getHostIP();
if (port == new settings().getHostport() || port == new settings().getHostport2()) {
hostIP = new settings().getHostIP2();
try {
InetAddress ipAddress = InetAddress.getByName(hostIP);//used ip'
try (DatagramSocket serverSocket = new DatagramSocket()) {
while (true) {
receiveAndSendPacket(serverSocket, ipAddress, port, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
}
} catch (SocketException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
} catch (UnknownHostException e) {
e.printStackTrace();
}
}
}
private static void receiveAndSendPacket(DatagramSocket serverSocket, InetAddress ipAddress, int port,
Datahandler datahandler, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) throws
IOException {
byte[] receiveData = new byte[4096];
DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
try {
serverSocket.receive(receivePacket);
} catch (IOException e) {
e.printStackTrace();
}
new Thread(() -> {
String sentence = new String(receivePacket.getData(), 0,
receivePacket.getLength());
sentence = sentence.replace("clientmessage:", "");
String ResponseMsg = datahandler.getResponseMsg(sentence, "", stanfordCoreNLP, stanfordCoreNLPSentiment,
true);
byte[] sendData = new byte[0];
try {
sendData = ResponseMsg.getBytes("UTF-8");
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
int deliver_port = 0;
switch (port) {
case 48475:
deliver_port = 48470;
break;
case 48476:
deliver_port = 48471;
break;
case 48477:
deliver_port = 48472;
break;
case 48478:
deliver_port = 48473;
break;
}
DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, ipAddress, deliver_port);
try {
serverSocket.send(sendPacket);
} catch (IOException e) {
e.printStackTrace();
}
}).start();
}
}

View File

@@ -20,11 +20,15 @@ import edu.stanford.nlp.util.CoreMap;
import org.ejml.simple.SimpleMatrix;
import java.util.*;
import java.util.concurrent.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class Datahandler {
private ExecutorService pool = Executors.newFixedThreadPool(7);
private CompletionService completionService = new ExecutorCompletionService(pool);
private HashMap<String, Annotation> pipelineAnnotationCache;
private HashMap<String, Annotation> pipelineSentimentAnnotationCache;
private HashMap<String, CoreDocument> coreDocumentAnnotationCache;
@@ -278,7 +282,7 @@ public class Datahandler {
ArrayList<String> stopWordLemma1 = stopWordLemmaHashMap.getOrDefault(str1, null);
Integer PairCounter1 = PairCounterHashMap.getOrDefault(str1, null);
SentimentAnalyzerTest SMX = new SentimentAnalyzerTest(strF, str1, new SimilarityMatrix(strF, str1),
SentimentAnalyzerTest SMX = new SentimentAnalyzerTest(strF, str1,
coreMaps1, coreMaps2, strAnno,
pipelineAnnotationCache.get(str1), strAnnoSentiment,
pipelineSentimentAnnotationCache.get(str1), coreDocument, coreDocumentAnnotationCache.get(str1),
@@ -405,6 +409,109 @@ public class Datahandler {
return SMX;
}
private class get_res implements Callable<SentimentAnalyzerTest> {
private final String strF;
private final String str1;
private final StanfordCoreNLP stanfordCoreNLP;
private final StanfordCoreNLP stanfordCoreNLPSentiment;
private final List<CoreMap> coreMaps1;
private final Annotation strAnno;
private final Annotation strAnnoSentiment;
private final CoreDocument coreDocument;
private final Integer tokenizeCountingF;
private final List<List<TaggedWord>> taggedWordListF;
private final ArrayList<TypedDependency> typedDependenciesF;
private final ArrayList<Integer> rnnCoreAnnotationsPredictedF;
private final ArrayList<SimpleMatrix> simpleMatricesF;
private final ArrayList<SimpleMatrix> simpleMatricesNodevectorsF;
private final List<String> listF;
private final Integer longestF;
private final List<CoreMap> sentencesF;
private final List<CoreMap> sentencesSentimentF;
private final ArrayList<Tree> treesF;
private final ArrayList<GrammaticalStructure> grammaticalStructuresF;
private final Integer sentimentLongestF;
private final List<IMWE<IToken>> imwesF;
private final Integer inflectedCounterNegativeF;
private final Integer inflectedCounterPositiveF;
private final ArrayList<String> tokenEntryF;
private final Integer unmarkedPatternCounterF;
private final ArrayList<String> strTokensIpartFormF;
private final ArrayList<String> tokenFormsF;
private final ArrayList<Integer> intTokenEntyCountsF;
private final Integer markedContinuousCounterF;
private final ArrayList<String> iTokenTagsF;
private final ArrayList<String> strTokenEntryGetPOSF;
private final ArrayList<String> retrieveTGWListF;
private final Integer pairCounterF;
private final Integer tokensCounterF;
private final ArrayList<String> stopWordLemmaF;
private final ArrayList<String> nerEntitiesF;
private final ArrayList<String> stopWordTokenF;
private final ArrayList<String> entityTokenTagsF;
private final ArrayList<String> nerEntitiesTypeF;
private final Integer anotatorcounterF;
private final ArrayList<String> strTokenStemsF;
public get_res(String strF, String str1, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment, List<CoreMap> coreMaps1, Annotation strAnno, Annotation strAnnoSentiment, CoreDocument coreDocument, Integer tokenizeCountingF, List<List<TaggedWord>> taggedWordListF, ArrayList<TypedDependency> typedDependenciesF, ArrayList<Integer> rnnCoreAnnotationsPredictedF, ArrayList<SimpleMatrix> simpleMatricesF, ArrayList<SimpleMatrix> simpleMatricesNodevectorsF, List<String> listF, Integer longestF, List<CoreMap> sentencesF, List<CoreMap> sentencesSentimentF, ArrayList<Tree> treesF, ArrayList<GrammaticalStructure> grammaticalStructuresF, Integer sentimentLongestF, List<IMWE<IToken>> imwesF, Integer inflectedCounterNegativeF, Integer inflectedCounterPositiveF, ArrayList<String> tokenEntryF, Integer unmarkedPatternCounterF, ArrayList<String> strTokensIpartFormF, ArrayList<String> tokenFormsF, ArrayList<Integer> intTokenEntyCountsF, Integer markedContinuousCounterF, ArrayList<String> iTokenTagsF, ArrayList<String> strTokenEntryGetPOSF, ArrayList<String> retrieveTGWListF, Integer pairCounterF, Integer tokensCounterF, ArrayList<String> stopWordLemmaF, ArrayList<String> nerEntitiesF, ArrayList<String> stopWordTokenF, ArrayList<String> entityTokenTagsF, ArrayList<String> nerEntitiesTypeF, Integer anotatorcounterF, ArrayList<String> strTokenStemsF) {
this.strF = strF;
this.str1 = str1;
this.stanfordCoreNLP = stanfordCoreNLP;
this.stanfordCoreNLPSentiment = stanfordCoreNLPSentiment;
this.coreMaps1 = coreMaps1;
this.strAnno = strAnno;
this.strAnnoSentiment = strAnnoSentiment;
this.coreDocument = coreDocument;
this.tokenizeCountingF = tokenizeCountingF;
this.taggedWordListF = taggedWordListF;
this.typedDependenciesF = typedDependenciesF;
this.rnnCoreAnnotationsPredictedF = rnnCoreAnnotationsPredictedF;
this.simpleMatricesF = simpleMatricesF;
this.simpleMatricesNodevectorsF = simpleMatricesNodevectorsF;
this.listF = listF;
this.longestF = longestF;
this.sentencesF = sentencesF;
this.sentencesSentimentF = sentencesSentimentF;
this.treesF = treesF;
this.grammaticalStructuresF = grammaticalStructuresF;
this.sentimentLongestF = sentimentLongestF;
this.imwesF = imwesF;
this.inflectedCounterNegativeF = inflectedCounterNegativeF;
this.inflectedCounterPositiveF = inflectedCounterPositiveF;
this.tokenEntryF = tokenEntryF;
this.unmarkedPatternCounterF = unmarkedPatternCounterF;
this.strTokensIpartFormF = strTokensIpartFormF;
this.tokenFormsF = tokenFormsF;
this.intTokenEntyCountsF = intTokenEntyCountsF;
this.markedContinuousCounterF = markedContinuousCounterF;
this.iTokenTagsF = iTokenTagsF;
this.strTokenEntryGetPOSF = strTokenEntryGetPOSF;
this.retrieveTGWListF = retrieveTGWListF;
this.pairCounterF = pairCounterF;
this.tokensCounterF = tokensCounterF;
this.stopWordLemmaF = stopWordLemmaF;
this.nerEntitiesF = nerEntitiesF;
this.stopWordTokenF = stopWordTokenF;
this.entityTokenTagsF = entityTokenTagsF;
this.nerEntitiesTypeF = nerEntitiesTypeF;
this.anotatorcounterF = anotatorcounterF;
this.strTokenStemsF = strTokenStemsF;
}
@Override
public SentimentAnalyzerTest call() throws Exception {
return getReponseFuturesHelper(strF, str1, stanfordCoreNLP, stanfordCoreNLPSentiment,
coreMaps1, strAnno, strAnnoSentiment, coreDocument, tokenizeCountingF, taggedWordListF
, typedDependenciesF, rnnCoreAnnotationsPredictedF, simpleMatricesF, simpleMatricesNodevectorsF
, listF, longestF, sentencesF, sentencesSentimentF, treesF, grammaticalStructuresF, sentimentLongestF
, imwesF, inflectedCounterNegativeF, inflectedCounterPositiveF, tokenEntryF, unmarkedPatternCounterF
, strTokensIpartFormF, tokenFormsF, intTokenEntyCountsF, markedContinuousCounterF, iTokenTagsF
, strTokenEntryGetPOSF, retrieveTGWListF, pairCounterF, tokensCounterF, stopWordLemmaF, nerEntitiesF
, stopWordTokenF, entityTokenTagsF, nerEntitiesTypeF, anotatorcounterF, strTokenStemsF);
}
}
public String getResponseFutures(String strF, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) {
if (strResponses.getOrDefault(strF, null) == null) {
strResponses.put(strF, new ArrayList<>());
@@ -463,127 +570,152 @@ public class Datahandler {
StringBuilder SB = new StringBuilder();
List<String> ues_copy = new ArrayList(DataMapper.getAllStrings());
double preRelationUserCounters = -155000.0;
//System.out.println(ues_copy.toString());
ArrayList<Future<SentimentAnalyzerTest>> futures = new ArrayList<>();
for (String str1 : ues_copy) {
if (strF != str1) {
SentimentAnalyzerTest SMX = getReponseFuturesHelper(strF, str1, stanfordCoreNLP, stanfordCoreNLPSentiment,
//critical section
Future<SentimentAnalyzerTest> submit = completionService.submit(new get_res(strF, str1, stanfordCoreNLP, stanfordCoreNLPSentiment,
coreMaps1, strAnno, strAnnoSentiment, coreDocument, tokenizeCountingF, taggedWordListF
, typedDependenciesF, rnnCoreAnnotationsPredictedF, simpleMatricesF, simpleMatricesNodevectorsF
, listF, longestF, sentencesF, sentencesSentimentF, treesF, grammaticalStructuresF, sentimentLongestF
, imwesF, InflectedCounterNegativeF, InflectedCounterPositiveF, tokenEntryF, UnmarkedPatternCounterF
, strTokensIpartFormF, tokenFormsF, intTokenEntyCountsF, MarkedContinuousCounterF, ITokenTagsF
, strTokenEntryGetPOSF, retrieveTGWListF, PairCounterF, TokensCounterF, stopWordLemmaF, nerEntitiesF
, stopWordTokenF, entityTokenTagsF, nerEntitiesTypeF, AnotatorcounterF, strTokenStemsF);
if (tokenizeCountingF == null) {
tokenizeCountingF = SMX.getTokenizeCountingF();
}
if (taggedWordListF == null) {
taggedWordListF = SMX.getTaggedWordListF();
}
if (typedDependenciesF == null) {
typedDependenciesF = SMX.getTypedDependenciesF();
}
if (rnnCoreAnnotationsPredictedF == null) {
rnnCoreAnnotationsPredictedF = SMX.getRnnCoreAnnotationsPredictedF();
}
if (simpleMatricesF == null) {
simpleMatricesF = SMX.getSimpleMatricesF();
}
if (simpleMatricesNodevectorsF == null) {
simpleMatricesNodevectorsF = SMX.getSimpleMatricesNodevectorsF();
}
if (listF == null) {
listF = SMX.getListF();
}
if (longestF == null) {
longestF = SMX.getLongestF();
}
if (sentencesF == null) {
sentencesF = SMX.getSentencesF();
}
if (sentencesSentimentF == null) {
sentencesSentimentF = SMX.getSentencesSentimentF();
}
if (treesF == null) {
treesF = SMX.getTreesF();
}
if (grammaticalStructuresF == null) {
grammaticalStructuresF = SMX.getGrammaticalStructuresF();
}
if (sentimentLongestF == null) {
sentimentLongestF = SMX.getSentimentLongestF();
}
if (imwesF == null) {
imwesF = SMX.getImwesF();
}
if (InflectedCounterNegativeF == null) {
InflectedCounterNegativeF = SMX.getInflectedCounterNegativeF();
}
if (InflectedCounterPositiveF == null) {
InflectedCounterPositiveF = SMX.getInflectedCounterPositiveF();
}
if (tokenEntryF == null) {
tokenEntryF = SMX.getTokenEntryF();
}
if (UnmarkedPatternCounterF == null) {
UnmarkedPatternCounterF = SMX.getUnmarkedPatternCounterF();
}
if (strTokensIpartFormF == null) {
strTokensIpartFormF = SMX.getStrTokensIpartFormF();
}
if (tokenFormsF == null) {
tokenFormsF = SMX.getTokenFormsF();
}
if (intTokenEntyCountsF == null) {
intTokenEntyCountsF = SMX.getIntTokenEntyCountsF();
}
if (MarkedContinuousCounterF == null) {
MarkedContinuousCounterF = SMX.getMarkedContinuousCounterF();
}
if (ITokenTagsF == null) {
ITokenTagsF = SMX.getITokenTagsF();
}
if (strTokenEntryGetPOSF == null) {
strTokenEntryGetPOSF = SMX.getStrTokenEntryGetPOSF();
}
if (retrieveTGWListF == null) {
retrieveTGWListF = SMX.getRetrieveTGWListF();
}
if (PairCounterF == null) {
PairCounterF = SMX.getPairCounterF();
}
if (TokensCounterF == null) {
TokensCounterF = SMX.getTokensCounterF();
}
if (stopWordLemmaF == null) {
stopWordLemmaF = SMX.getStopWordLemmaF();
}
if (nerEntitiesF == null) {
nerEntitiesF = SMX.getNerEntitiesF();
}
if (stopWordTokenF == null) {
stopWordTokenF = SMX.getStopWordTokenF();
}
if (entityTokenTagsF == null) {
entityTokenTagsF = SMX.getEntityTokenTagsF();
}
if (nerEntitiesTypeF == null) {
nerEntitiesTypeF = SMX.getNerEntitiesTypeF();
}
if (AnotatorcounterF == null) {
AnotatorcounterF = SMX.getAnotatorcounterF();
}
if (strTokenStemsF == null) {
strTokenStemsF = SMX.getStrTokenStemsF();
}
SimilarityMatrix getSMX = SMX.callSMX();
double scoreRelationLastUserMsg = getSMX.getDistance();
if (scoreRelationLastUserMsg > preRelationUserCounters) {
preRelationUserCounters = scoreRelationLastUserMsg;
concurrentRelations.add(getSMX.getSecondaryString());
}
, stopWordTokenF, entityTokenTagsF, nerEntitiesTypeF, AnotatorcounterF, strTokenStemsF));
futures.add(submit);
//end of critical section, do the rest sequential.
}
}
int pending = futures.size();
while (pending > 0) {
try {
Future<SentimentAnalyzerTest> completed = completionService.poll(100, TimeUnit.MILLISECONDS);
if (completed != null) {
--pending;
SentimentAnalyzerTest SMX = completed.get();
if (SMX == null) continue;
double scoreRelationLastUserMsg = SMX.getScore();
if (scoreRelationLastUserMsg > preRelationUserCounters) {
preRelationUserCounters = scoreRelationLastUserMsg;
concurrentRelations.add(SMX.getSecondaryString());
}
//this part below should be sequential hopefully
if (tokenizeCountingF == null) {
tokenizeCountingF = SMX.getTokenizeCountingF();
}
if (taggedWordListF == null) {
taggedWordListF = SMX.getTaggedWordListF();
}
if (typedDependenciesF == null) {
typedDependenciesF = SMX.getTypedDependenciesF();
}
if (rnnCoreAnnotationsPredictedF == null) {
rnnCoreAnnotationsPredictedF = SMX.getRnnCoreAnnotationsPredictedF();
}
if (simpleMatricesF == null) {
simpleMatricesF = SMX.getSimpleMatricesF();
}
if (simpleMatricesNodevectorsF == null) {
simpleMatricesNodevectorsF = SMX.getSimpleMatricesNodevectorsF();
}
if (listF == null) {
listF = SMX.getListF();
}
if (longestF == null) {
longestF = SMX.getLongestF();
}
if (sentencesF == null) {
sentencesF = SMX.getSentencesF();
}
if (sentencesSentimentF == null) {
sentencesSentimentF = SMX.getSentencesSentimentF();
}
if (treesF == null) {
treesF = SMX.getTreesF();
}
if (grammaticalStructuresF == null) {
grammaticalStructuresF = SMX.getGrammaticalStructuresF();
}
if (sentimentLongestF == null) {
sentimentLongestF = SMX.getSentimentLongestF();
}
if (imwesF == null) {
imwesF = SMX.getImwesF();
}
if (InflectedCounterNegativeF == null) {
InflectedCounterNegativeF = SMX.getInflectedCounterNegativeF();
}
if (InflectedCounterPositiveF == null) {
InflectedCounterPositiveF = SMX.getInflectedCounterPositiveF();
}
if (tokenEntryF == null) {
tokenEntryF = SMX.getTokenEntryF();
}
if (UnmarkedPatternCounterF == null) {
UnmarkedPatternCounterF = SMX.getUnmarkedPatternCounterF();
}
if (strTokensIpartFormF == null) {
strTokensIpartFormF = SMX.getStrTokensIpartFormF();
}
if (tokenFormsF == null) {
tokenFormsF = SMX.getTokenFormsF();
}
if (intTokenEntyCountsF == null) {
intTokenEntyCountsF = SMX.getIntTokenEntyCountsF();
}
if (MarkedContinuousCounterF == null) {
MarkedContinuousCounterF = SMX.getMarkedContinuousCounterF();
}
if (ITokenTagsF == null) {
ITokenTagsF = SMX.getITokenTagsF();
}
if (strTokenEntryGetPOSF == null) {
strTokenEntryGetPOSF = SMX.getStrTokenEntryGetPOSF();
}
if (retrieveTGWListF == null) {
retrieveTGWListF = SMX.getRetrieveTGWListF();
}
if (PairCounterF == null) {
PairCounterF = SMX.getPairCounterF();
}
if (TokensCounterF == null) {
TokensCounterF = SMX.getTokensCounterF();
}
if (stopWordLemmaF == null) {
stopWordLemmaF = SMX.getStopWordLemmaF();
}
if (nerEntitiesF == null) {
nerEntitiesF = SMX.getNerEntitiesF();
}
if (stopWordTokenF == null) {
stopWordTokenF = SMX.getStopWordTokenF();
}
if (entityTokenTagsF == null) {
entityTokenTagsF = SMX.getEntityTokenTagsF();
}
if (nerEntitiesTypeF == null) {
nerEntitiesTypeF = SMX.getNerEntitiesTypeF();
}
if (AnotatorcounterF == null) {
AnotatorcounterF = SMX.getAnotatorcounterF();
}
if (strTokenStemsF == null) {
strTokenStemsF = SMX.getStrTokenStemsF();
}
}
} catch (InterruptedException e) {
throw new RuntimeException(e);
} catch (ExecutionException e) {
throw new RuntimeException(e);
}
}
int cacheRequirement = 8500;
if (preRelationUserCounters > cacheRequirement && !ues_copy.contains(strF) && filterContent(strF)) {
DataMapper.InsertMYSQLStrings(strF);
@@ -608,7 +740,7 @@ public class Datahandler {
strResponses.put(strF, orDefault);
} else if (orDefault.size() > 5) {
double v = Math.random() * 10;
if (v > 8.6) {
if (v > 5.6) {
orDefault = new ArrayList<>();
strResponses.put(strF, orDefault);
}
@@ -628,8 +760,7 @@ public class Datahandler {
, strTokensIpartFormF, tokenFormsF, intTokenEntyCountsF, MarkedContinuousCounterF, ITokenTagsF
, strTokenEntryGetPOSF, retrieveTGWListF, PairCounterF, TokensCounterF, stopWordLemmaF, nerEntitiesF
, stopWordTokenF, entityTokenTagsF, nerEntitiesTypeF, AnotatorcounterF, strTokenStemsF);
SimilarityMatrix getSMX = SMX.callSMX();
double scoreRelationLastUserMsg = getSMX.getDistance();
double scoreRelationLastUserMsg = SMX.getScore();
if (preRelationUserCounters > scoreRelationLastUserMsg) {
break;
}

View File

@@ -14,14 +14,6 @@ public class SimilarityMatrix {
private String SecondaryString;
private double distance;
public final double getDistance() {
return distance;
}
public final void setDistance(double distance) {
this.distance = distance;
}
public SimilarityMatrix(String str1, String str2) {
this.PrimaryString = str1;
this.SecondaryString = str2;

View File

@@ -38,6 +38,7 @@ import edu.stanford.nlp.util.Pair;
import java.io.IOException;
import java.io.StringReader;
import java.util.*;
import java.util.concurrent.Semaphore;
import java.util.logging.FileHandler;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;
@@ -56,20 +57,19 @@ import org.ejml.simple.SimpleMatrix;
*/
public class SentimentAnalyzerTest {
private final SimilarityMatrix smxParam;
private final String str;
private final String str1;
private final MaxentTagger tagger;
private final GrammaticalStructureFactory gsf;
private final AbstractSequenceClassifier classifier;
private final List<CoreMap> coreMaps1;
private final List<CoreMap> coreMaps2;
private final Annotation pipelineAnnotation1;
private final Annotation pipelineAnnotation2;
private final Annotation pipelineAnnotation1Sentiment;
private final Annotation pipelineAnnotation2Sentiment;
private final CoreDocument pipelineCoreDcoument1;
private final CoreDocument pipelineCoreDcoument2;
private String str;
private String str1;
private MaxentTagger tagger;
private GrammaticalStructureFactory gsf;
private AbstractSequenceClassifier classifier;
private List<CoreMap> coreMaps1;
private List<CoreMap> coreMaps2;
private Annotation pipelineAnnotation1;
private Annotation pipelineAnnotation2;
private Annotation pipelineAnnotation1Sentiment;
private Annotation pipelineAnnotation2Sentiment;
private CoreDocument pipelineCoreDcoument1;
private CoreDocument pipelineCoreDcoument2;
//private Logger logger = Logger.getLogger("autismlog");
private FileHandler fh;
@@ -415,8 +415,17 @@ public class SentimentAnalyzerTest {
private ArrayList<String> stopWordLemma1;
private Integer PairCounterF;
private Integer PairCounter1;
private Double score_res;
public SentimentAnalyzerTest(String str, String str1, SimilarityMatrix smxParam, List<CoreMap> coreMaps1, List<CoreMap> coreMaps2,
public Double getScore(){
return score_res;
}
public String getSecondaryString(){
return this.str1;
}
public SentimentAnalyzerTest(String str, String str1, List<CoreMap> coreMaps1, List<CoreMap> coreMaps2,
Annotation strPipeline1, Annotation strPipeline2, Annotation strPipeSentiment1, Annotation strPipeSentiment2,
CoreDocument pipelineCoreDcoument1, CoreDocument pipelineCoreDcoument2,
MaxentTagger tagger, GrammaticalStructureFactory gsf,
@@ -462,7 +471,6 @@ public class SentimentAnalyzerTest {
Integer PairCounter1) {
this.str = str;
this.str1 = str1;
this.smxParam = smxParam;
this.tagger = tagger;
this.gsf = gsf;
this.classifier = classifier;
@@ -542,6 +550,7 @@ public class SentimentAnalyzerTest {
this.stopWordLemma1 = stopWordLemma1;
this.PairCounterF = PairCounterF;
this.PairCounter1 = PairCounter1;
this.score_res = callSMX();
}
private List<List<TaggedWord>> getTaggedWordList(String message) {
@@ -550,7 +559,11 @@ public class SentimentAnalyzerTest {
TokenizerFactory<CoreLabel> ptbTokenizerFactory = PTBTokenizer.factory(new CoreLabelTokenFactory(), "untokenizable=noneDelete"); //noneDelete //firstDelete
tokenizer.setTokenizerFactory(ptbTokenizerFactory);
for (final List<HasWord> sentence : tokenizer) {
taggedwordlist.add(tagger.tagSentence(sentence));
try {
taggedwordlist.add(tagger.tagSentence(sentence));
} catch (Exception ex) {
System.out.println("crashed in tagger.tagsentence");
}
}
return taggedwordlist;
}
@@ -2257,6 +2270,7 @@ public class SentimentAnalyzerTest {
public void validateStringCaches() {
Class<SentimentCoreAnnotations.SentimentAnnotatedTree> sentimentAnnotatedTreeClass =
SentimentCoreAnnotations.SentimentAnnotatedTree.class;
@@ -2471,7 +2485,8 @@ public class SentimentAnalyzerTest {
}
public SimilarityMatrix callSMX() {
public Double callSMX() {
Double score = -100.0;
/*
@@ -2566,7 +2581,8 @@ public class SentimentAnalyzerTest {
score = stopwordTokenPairCounterScoring(score, this.stopWordTokenF, this.stopWordToken1,
this.PairCounterF, this.PairCounter1);
//logger.info("score post stopwordTokenPairCounterScoring " + score);
smxParam.setDistance(score);
return smxParam;
return score;
}
}

View File

@@ -1,14 +1,17 @@
package PresentationLayer;
import DataLayer.RunnerClient;
import DataLayer.ThreadClient;
import DataLayer.settings;
import FunctionLayer.Datahandler;
import FunctionLayer.PipelineJMWESingleton;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import discord4j.core.event.domain.lifecycle.ReadyEvent;
import discord4j.core.event.domain.message.MessageCreateEvent;
import discord4j.core.object.entity.Message;
import net.dv8tion.jda.api.JDABuilder;
import net.dv8tion.jda.api.entities.Activity;
import net.dv8tion.jda.api.entities.Member;
import net.dv8tion.jda.api.entities.Message;
import net.dv8tion.jda.api.entities.MessageChannel;
import net.dv8tion.jda.api.events.message.MessageReceivedEvent;
import net.dv8tion.jda.api.hooks.ListenerAdapter;
import net.dv8tion.jda.api.requests.GatewayIntent;
@@ -20,7 +23,6 @@ import java.net.*;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@@ -29,67 +31,9 @@ import java.util.concurrent.Executors;
*/
public class DiscordHandler extends ListenerAdapter {
private static void receiveAndSendPacket(DatagramSocket serverSocket, InetAddress ipAddress, int port,
Datahandler datahandler, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) throws IOException {
byte[] receiveData = new byte[4096];
DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
try {
serverSocket.receive(receivePacket);
} catch (IOException e) {
e.printStackTrace();
}
String sentence = new String(receivePacket.getData(), 0,
receivePacket.getLength());
sentence = sentence.replace("clientmessage:", "");
String ResponseMsg = datahandler.getResponseMsg(sentence, "", stanfordCoreNLP, stanfordCoreNLPSentiment,
true);
byte[] sendData = ResponseMsg.getBytes("UTF-8");
int deliver_port = 0;
switch (port) {
case 48475:
deliver_port = 48470;
break;
case 48476:
deliver_port = 48471;
break;
case 48477:
deliver_port = 48472;
break;
case 48478:
deliver_port = 48473;
break;
}
DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, ipAddress, deliver_port);
serverSocket.send(sendPacket);
}
public static void handleUDPTraffic(int port, Datahandler datahandler,
StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) {
try (DatagramSocket serverSocket = new DatagramSocket(port)) {
String hostIP = new settings().getHostIP();
if (port == new settings().getHostport() || port == new settings().getHostport2()) {
hostIP = new settings().getHostIP2();
}
InetAddress ipAddress = InetAddress.getByName(hostIP);//used ip'
while (true) {
receiveAndSendPacket(serverSocket, ipAddress, port, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
}
} catch (SocketException | UnknownHostException e) {
e.printStackTrace();
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private static StanfordCoreNLP stanfordCoreNLP;
private static Datahandler datahandler;
private static StanfordCoreNLP stanfordCoreNLPSentiment;
private static ExecutorService executorService = Executors.newFixedThreadPool(3);
private static ExecutorService executorServiceIngame = Executors.newFixedThreadPool(4);
//TODO add python program that edits the java code. python program just adds test if statements on
//variables until the tests pass
@@ -109,21 +53,13 @@ public class DiscordHandler extends ListenerAdapter {
.setActivity(Activity.playing("Being the autism bot"))
.build();
int autismbotCount = 4;
//make sure not to use ports that are already occupied.
for (int i = 0; i < autismbotCount; i++) {
final int j = i;
executorServiceIngame.execute(new Runnable() {
@Override
public void run() {
ArrayList<Integer> ports = new ArrayList<Integer>();
ports.add(48475);
ports.add(48476);
ports.add(48477);
ports.add(48478);
handleUDPTraffic(ports.get(j), datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
}
});
ArrayList<Integer> ports = new ArrayList<Integer>();
ports.add(48475);
ports.add(48476);
ports.add(48477);
ports.add(48478);
for (Integer port : ports) {
new ThreadClient(port, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
}
}
@@ -165,24 +101,9 @@ public class DiscordHandler extends ListenerAdapter {
}
}
}
if (mentionedBot || channelName.contains("general-autism")) {
final String contentF = content;
String ResponseStr = datahandler.getResponseMsg(contentF, username, stanfordCoreNLP, stanfordCoreNLPSentiment,
false);
if (!ResponseStr.isEmpty()) {
System.out.print("\nResponseStr3: " + ResponseStr + "\n");
event.getMessage().getChannel().sendMessage(ResponseStr).queue();
}
} else {
final String contentF = content;
executorService.execute(new Runnable() {
@Override
public void run() {
String strF = datahandler.trimString(contentF);
datahandler.getResponseFutures(strF, stanfordCoreNLP, stanfordCoreNLPSentiment);
}
});
}
final String contentF = content;
new RunnerClient(contentF, mentionedBot, channelName, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment,
event, username);
}
}
}

View File

@@ -1,4 +1,4 @@
import FunctionLayer.DatahandlerKotlinObsolete;
/*
import FunctionLayer.PipelineJMWESingleton;
import FunctionLayer.StanfordParser.SentimentAnalyzerTest;
import edu.mit.jmwe.data.IMWE;
@@ -177,6 +177,7 @@ public class junit {
//@Test
public void testScoring() {
/*
DatahandlerKotlinObsolete datahandler = new DatahandlerKotlinObsolete();
PipelineJMWESingleton.getINSTANCE();
StanfordCoreNLP stanfordCoreNLP = datahandler.pipeLineSetUp();
@@ -681,6 +682,9 @@ public class junit {
sent2 = "ΣZΣ | jenz you see any bots on here??? 404Trucy look your new friend 1pm for me :loved: :shinogeci: :sbpl: you alive? does shino like fasz? boost the server pls";
PerformTesting = testCall(sent1, sent2, stanfordCoreNLP, stanfordCoreNLPSentiment);
Assert.assertTrue(PerformTesting < -2400.0);
}
}
*/