From 509cd2cbe9d651782d6fddc781ece305fd249159 Mon Sep 17 00:00:00 2001
From: christian
Date: Thu, 21 Oct 2021 21:36:17 +0200
Subject: [PATCH] Update dependencies (CoreNLP 4.3.1, EJML 0.39, Discord4J
 3.1.7), switch to cased NER models and the PCFG parser, rework sentence
 eviction, and add null checks

---
 ArtificialAutism/pom.xml                           | 55 +++++++++++--------
 .../src/main/java/DataLayer/DataMapper.java        |  2 +-
 .../main/java/FunctionLayer/Datahandler.kt         | 27 ++++++---
 .../src/main/java/FunctionLayer/DoStuff.java       |  4 +-
 .../StanfordParser/SentimentAnalyzerTest.java      | 39 ++++++++-----
 .../PresentationLayer/DiscordHandler.java          |  8 ++-
 6 files changed, 82 insertions(+), 53 deletions(-)

diff --git a/ArtificialAutism/pom.xml b/ArtificialAutism/pom.xml
index 6177ff88..da8c9b6e 100644
--- a/ArtificialAutism/pom.xml
+++ b/ArtificialAutism/pom.xml
@@ -1,5 +1,6 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <groupId>com.mycompany</groupId>
     <artifactId>ArtificialAutism</artifactId>
@@ -32,28 +33,29 @@
             <artifactId>ws4j</artifactId>
             <version>1.0.1</version>
         </dependency>
-        <dependency>
-            <groupId>StanfordParser</groupId>
-            <artifactId>StanfordParser</artifactId>
-            <version>1.0</version>
-        </dependency>
         <dependency>
-            <groupId>StanfordParserModel</groupId>
-            <artifactId>StanfordParserModel</artifactId>
-            <version>1.0</version>
+            <groupId>stanford-corenlp-models-english</groupId>
+            <artifactId>stanford-corenlp-models-english</artifactId>
+            <version>4.3.1</version>
+            <type>jar</type>
         </dependency>
         <dependency>
-            <groupId>srParser</groupId>
-            <artifactId>srParser</artifactId>
-            <version>1</version>
+            <groupId>ejml-simple</groupId>
+            <artifactId>ejml-simple</artifactId>
+            <version>0.39</version>
+            <type>jar</type>
         </dependency>
         <dependency>
-            <groupId>ejml</groupId>
-            <artifactId>ejml</artifactId>
-            <version>0.2.3</version>
+            <groupId>ejml-core</groupId>
+            <artifactId>ejml-core</artifactId>
+            <version>0.39</version>
+            <type>jar</type>
+        </dependency>
+        <dependency>
+            <groupId>ejml-ddense</groupId>
+            <artifactId>ejml-ddense</artifactId>
+            <version>0.39</version>
             <type>jar</type>
         </dependency>
@@ -68,23 +70,28 @@
             <version>1.0.2</version>
             <type>jar</type>
         </dependency>
-        <dependency>
-            <groupId>CoreNLP</groupId>
-            <artifactId>CoreNLP</artifactId>
-            <version>1.0</version>
-            <type>jar</type>
-        </dependency>
         <dependency>
             <groupId>org.apache.lucene</groupId>
             <artifactId>lucene-analyzers-common</artifactId>
             <version>7.2.0</version>
             <type>jar</type>
         </dependency>
+        <dependency>
+            <groupId>edu.stanford.nlp</groupId>
+            <artifactId>stanford-corenlp</artifactId>
+            <version>4.3.1</version>
+        </dependency>
+        <dependency>
+            <groupId>edu.stanford.nlp</groupId>
+            <artifactId>stanford-corenlp</artifactId>
+            <version>4.3.1</version>
+            <classifier>models</classifier>
+        </dependency>
         <dependency>
             <groupId>com.discord4j</groupId>
             <artifactId>discord4j-core</artifactId>
-            <version>3.1.1</version>
+            <version>3.1.7</version>
         </dependency>
         <dependency>
             <groupId>org.jetbrains.kotlin</groupId>
diff --git a/ArtificialAutism/src/main/java/DataLayer/DataMapper.java b/ArtificialAutism/src/main/java/DataLayer/DataMapper.java
index ae34505f..3ab45f97 100644
--- a/ArtificialAutism/src/main/java/DataLayer/DataMapper.java
+++ b/ArtificialAutism/src/main/java/DataLayer/DataMapper.java
@@ -85,7 +85,7 @@ public class DataMapper {
         Connection l_cCon = null;
         PreparedStatement l_pStatement = null;
         ResultSet l_rsSearch = null;
-        String l_sSQL = "delete from Sentences where last_used < NOW() - INTERVAL 4 WEEK LIMIT 55";
+        String l_sSQL = "delete from Sentences order by last_used asc LIMIT 15";
         try {
             l_cCon = DBCPDataSource.getConnection();
             l_pStatement = l_cCon.prepareStatement(l_sSQL);
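Note on the DataMapper change: the old statement aged rows out (last_used older than four weeks, at most 55 per call); the new one unconditionally evicts the 15 least-recently-used rows on every call. A minimal sketch of the same eviction with the batch size parameterized, assuming MySQL semantics (DELETE ... ORDER BY ... LIMIT is MySQL-specific) and the project's DBCPDataSource; the class and method names below are illustrative, not project code:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;

    public final class SentenceEviction {
        // Deletes the batchSize least-recently-used sentences (the patch hardcodes 15).
        public static int deleteLeastRecentlyUsed(int batchSize) throws SQLException {
            String sql = "delete from Sentences order by last_used asc LIMIT ?";
            try (Connection con = DBCPDataSource.getConnection();
                 PreparedStatement ps = con.prepareStatement(sql)) {
                ps.setInt(1, batchSize); // MySQL prepared statements accept a placeholder in LIMIT
                return ps.executeUpdate(); // number of rows actually removed
            }
        }
    }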
diff --git a/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.kt b/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.kt
index 44f2f10b..1bce0028 100644
--- a/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.kt
+++ b/ArtificialAutism/src/main/java/FunctionLayer/Datahandler.kt
@@ -42,8 +42,10 @@ public class Datahandler {
     private var pipelineSentimentAnnotationCache = HashMap()
     private var coreDocumentAnnotationCache: HashMap
    private var jmweAnnotationCache = HashMap()
-    private val stringCache = ArrayList()
-    private val nerModel = "edu/stanford/nlp/models/ner/english.all.3class.caseless.distsim.crf.ser.gz"
+    private var stringCache = ArrayList()
+
+    //private val nerModel = "edu/stanford/nlp/models/ner/english.all.3class.caseless.distsim.crf.ser.gz"
+    private val nerModel = "edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz"
     private var tagger: MaxentTagger = MaxentTagger()
     private var gsf: GrammaticalStructureFactory
     private var classifier: AbstractSequenceClassifier
@@ -99,8 +101,9 @@ public class Datahandler {
     fun initiateGrammaticalStructureFactory(): GrammaticalStructureFactory {
         val options = arrayOf("-maxLength", "100")
-        val lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz"
-        val lp = LexicalizedParser.loadModel(lexParserEnglishRNN, *options)
+        //val lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz"
+        val lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz"
+        val lp = LexicalizedParser.loadModel(lexParserEnglishPCFG, *options)
         val tlp = lp.getOp().langpack()
         return tlp.grammaticalStructureFactory()
     }
@@ -108,8 +111,10 @@ public class Datahandler {
     public fun pipeLineSetUp(): StanfordCoreNLP {
         val props = Properties()
         val shiftReduceParserPath = "edu/stanford/nlp/models/srparser/englishSR.ser.gz"
-        val nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.caseless.distsim.crf.ser.gz"
-        val nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.caseless.distsim.crf.ser.gz"
+        //val nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.caseless.distsim.crf.ser.gz"
+        val nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.distsim.crf.ser.gz"
+        //val nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.caseless.distsim.crf.ser.gz"
+        val nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.distsim.crf.ser.gz"
         props.setProperty("annotators", "tokenize,ssplit,pos,lemma,ner,parse")
         props.setProperty("parse.model", shiftReduceParserPath)
         props.setProperty("parse.maxlen", "90")
@@ -129,12 +134,14 @@ public class Datahandler {
     fun shiftReduceParserInitiate(): StanfordCoreNLP {
         val propsSentiment = Properties()
-        val lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz"
+        //val lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz"
+        val lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz"
         val sentimentModel = "edu/stanford/nlp/models/sentiment/sentiment.ser.gz"
-        val taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger"
+        //val taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger"
+        val taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words-distsim.tagger"
         val customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for,if,in,into,is,it,no,not,of," +
                 "on,or,such,that,the,their,then,there,these,they,this,to,was,will,with"
-        propsSentiment.setProperty("parse.model", lexParserEnglishRNN)
+        propsSentiment.setProperty("parse.model", lexParserEnglishPCFG)
         propsSentiment.setProperty("sentiment.model", sentimentModel)
         propsSentiment.setProperty("parse.maxlen", "90")
         propsSentiment.setProperty("threads", "5")
@@ -163,6 +170,8 @@ public class Datahandler {
         val arrayList = java.util.ArrayList(stringCache)
         DataMapper.InsertMYSQLStrings(arrayList)
         DataMapper.checkStringsToDelete();
+        stringCache = ArrayList();
+        initiateMYSQL();
     }
 }
diff --git a/ArtificialAutism/src/main/java/FunctionLayer/DoStuff.java b/ArtificialAutism/src/main/java/FunctionLayer/DoStuff.java
index e1df79f9..eb4a506b 100644
--- a/ArtificialAutism/src/main/java/FunctionLayer/DoStuff.java
+++ b/ArtificialAutism/src/main/java/FunctionLayer/DoStuff.java
@@ -81,9 +81,7 @@ public class DoStuff {
                 }
             }
         }
-        new Thread(() -> {
-            datahandler.updateStringCache();
-        }).start();
+        datahandler.updateStringCache();
     }
 }
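Note on the cache changes: updateStringCache now clears stringCache and reloads it from MySQL after the delete pass, and DoStuff calls it on the message-handling thread rather than spawning a background thread, so a message is never scored against a cache that is being cleared concurrently. A minimal sketch of the same idea made explicit with a lock, in Java for illustration; the holder class and its method names are assumptions, not project code:

    import java.util.ArrayList;
    import java.util.List;

    final class StringCacheHolder {
        private final Object lock = new Object();
        private List<String> cache = new ArrayList<>();

        // Callers score messages against a defensive copy.
        List<String> snapshot() {
            synchronized (lock) {
                return new ArrayList<>(cache);
            }
        }

        // Refresh swaps the whole list under the lock, mirroring the
        // clear-then-reload that updateStringCache performs after its DB round trip.
        void refresh(List<String> reloadedFromDb) {
            synchronized (lock) {
                cache = new ArrayList<>(reloadedFromDb);
            }
        }
    }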
diff --git a/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTest.java b/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTest.java
index d2c62a6c..047043d0 100644
--- a/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTest.java
+++ b/ArtificialAutism/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTest.java
@@ -1772,6 +1772,9 @@ public class SentimentAnalyzerTest {
     }

     private ArrayList<String> getnerEntities(CoreDocument coreDocument) {
+        if (coreDocument == null || coreDocument.entityMentions() == null) {
+            return new ArrayList<String>();
+        }
         ArrayList<String> arrs = new ArrayList<>();
         for (CoreEntityMention em : coreDocument.entityMentions()) {
             if (!arrs.contains(em.text())) {
@@ -1782,6 +1785,9 @@ public class SentimentAnalyzerTest {
     }

     private ArrayList<String> getnerEntitiesType(CoreDocument coreDocument) {
+        if (coreDocument == null || coreDocument.entityMentions() == null) {
+            return new ArrayList<String>();
+        }
         ArrayList<String> arrs = new ArrayList<>();
         for (CoreEntityMention em : coreDocument.entityMentions()) {
             if (!arrs.contains(em.entityType())) {
@@ -1841,22 +1847,27 @@ public class SentimentAnalyzerTest {
     }

     private ArrayList<String> getentityTokenTags(CoreDocument coreDocument) {
+        if (coreDocument == null || coreDocument.entityMentions() == null) {
+            return new ArrayList<String>();
+        }
         ArrayList<String> arrs = new ArrayList<>();
-        for (CoreEntityMention em : coreDocument.entityMentions()) {
-            List<CoreLabel> tokens = em.tokens();
-            String entityType = em.entityType();
-            Double EntityConfidences = 0.0;
-            Set<Map.Entry<String, Double>> entrySet = em.entityTypeConfidences().entrySet();
-            for (Map.Entry<String, Double> entries : entrySet) {
-                if (EntityConfidences < entries.getValue()) {
-                    EntityConfidences = entries.getValue();
+        if (coreDocument != null) {
+            for (CoreEntityMention em : coreDocument.entityMentions()) {
+                List<CoreLabel> tokens = em.tokens();
+                String entityType = em.entityType();
+                Double EntityConfidences = 0.0;
+                Set<Map.Entry<String, Double>> entrySet = em.entityTypeConfidences().entrySet();
+                for (Map.Entry<String, Double> entries : entrySet) {
+                    if (EntityConfidences < entries.getValue()) {
+                        EntityConfidences = entries.getValue();
+                    }
                 }
-            }
-            for (CoreLabel token : tokens) {
-                if (token != null) {
-                    if (!arrs.contains(token.tag())) {
-                        if (entityType.equals("PERSON") && EntityConfidences > 0.80) {
-                            arrs.add(token.tag());
+                for (CoreLabel token : tokens) {
+                    if (token != null) {
+                        if (!arrs.contains(token.tag())) {
+                            if (entityType.equals("PERSON") && EntityConfidences > 0.80) {
+                                arrs.add(token.tag());
+                            }
                         }
                     }
                 }
diff --git a/ArtificialAutism/src/main/java/PresentationLayer/DiscordHandler.java b/ArtificialAutism/src/main/java/PresentationLayer/DiscordHandler.java
index b16d5eb4..26179d8d 100644
--- a/ArtificialAutism/src/main/java/PresentationLayer/DiscordHandler.java
+++ b/ArtificialAutism/src/main/java/PresentationLayer/DiscordHandler.java
@@ -3,7 +3,6 @@ package PresentationLayer;
 import DataLayer.settings;
 import FunctionLayer.Datahandler;
 import FunctionLayer.PipelineJMWESingleton;
-import com.sun.tools.javac.util.List;
 import discord4j.core.DiscordClient;
 import discord4j.core.GatewayDiscordClient;
 import discord4j.core.event.domain.message.MessageCreateEvent;
@@ -13,6 +12,7 @@
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.net.*;
 import java.sql.SQLException;
+import java.util.ArrayList;


 /**
@@ -95,7 +95,11 @@ public class DiscordHandler {
         for (int i = 0; i < autismbotCount; i++) {
             final int j = i;
             new Thread(() -> {
-                List<Integer> ports = List.of(48475, 48476, 48477, 48478);
+                ArrayList<Integer> ports = new ArrayList<>();
+                ports.add(48475);
+                ports.add(48476);
+                ports.add(48477);
+                ports.add(48478);
                 handleUDPTraffic(ports.get(j), datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
             }).start();
         }
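Note on the null checks: each getter now returns an empty list when the document or its entity mentions are absent, instead of throwing a NullPointerException deeper in the loop. The inner confidence scan in getentityTokenTags can also be written with streams; a sketch of an equivalent, relying only on the entityTypeConfidences() map already used in the patch (the helper class and method names are illustrative):

    import edu.stanford.nlp.pipeline.CoreEntityMention;

    final class EntityConfidence {
        // Highest confidence across a mention's candidate entity types;
        // the 0.0 fallback mirrors the loop's starting value in the patch.
        static double maxConfidence(CoreEntityMention em) {
            return em.entityTypeConfidences().values().stream()
                    .mapToDouble(Double::doubleValue)
                    .max()
                    .orElse(0.0);
        }
    }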