updating the suggestions bot and the autism bot on Discord
parent 78532929ae
commit 5dee2a8e65
@@ -5,6 +5,8 @@
  */
 package DataLayer;
 
+import org.jetbrains.annotations.NotNull;
+
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
@@ -78,4 +80,39 @@ public class DataMapper {
             }
         }
     }
+
+    public static void checkStringsToDelete() {
+        Connection l_cCon = null;
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+        String l_sSQL = "delete from Sentences where last_used < NOW() - INTERVAL 4 WEEK LIMIT 25";
+        try {
+            l_cCon = DBCPDataSource.getConnection();
+            l_pStatement = l_cCon.prepareStatement(l_sSQL);
+            l_pStatement.execute();
+        } catch (SQLException throwables) {
+            throwables.printStackTrace();
+        } finally {
+            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
+        }
+    }
+
+    public static void updateLastUsed(@NotNull ArrayList<String> mysqlUpdateLastUsed) {
+        Connection l_cCon = null;
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+        String l_sSQL = "update Sentences Set last_used = now() where Strings = (?)";
+        try {
+            l_cCon = DBCPDataSource.getConnection();
+            l_pStatement = l_cCon.prepareStatement(l_sSQL);
+            for (String str1 : mysqlUpdateLastUsed) {
+                l_pStatement.setString(1, str1);
+                l_pStatement.execute();
+            }
+        } catch (SQLException throwables) {
+            throwables.printStackTrace();
+        } finally {
+            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
+        }
+    }
 }
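Editor's note: the new checkStringsToDelete prunes up to 25 sentences that have gone unused for four weeks, while updateLastUsed issues one UPDATE round trip per sentence inside a loop. JDBC can send those updates as a single batch. A minimal Kotlin sketch of the same UPDATE using addBatch/executeBatch — DBCPDataSource and the Sentences schema are the ones from the diff above, and updateLastUsedBatched is a hypothetical name, not part of this commit:

import java.sql.SQLException

// Sketch: batch the per-sentence UPDATEs instead of one execute() per row.
fun updateLastUsedBatched(sentences: List<String>) {
    try {
        DBCPDataSource.getConnection().use { con ->
            con.prepareStatement("update Sentences Set last_used = now() where Strings = (?)").use { stmt ->
                for (s in sentences) {
                    stmt.setString(1, s)
                    stmt.addBatch()      // queue the row locally
                }
                stmt.executeBatch()      // one round trip for all queued rows
            }
        }
    } catch (e: SQLException) {
        e.printStackTrace()              // same error handling as DataMapper above
    }
}

The use {} blocks close the statement and return the connection to the pool even on failure, which is what the CloseConnections(...) calls in the finally blocks do by hand.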
@@ -8,6 +8,8 @@ package FunctionLayer
 import DataLayer.DataMapper
 import FunctionLayer.StanfordParser.SentimentAnalyzerTest
 import com.google.common.base.Stopwatch
+import edu.mit.jmwe.data.IMWE
+import edu.mit.jmwe.data.IToken
 import edu.stanford.nlp.ie.AbstractSequenceClassifier
 import edu.stanford.nlp.ie.crf.CRFClassifier
 import edu.stanford.nlp.ling.CoreAnnotations
@@ -18,13 +20,10 @@ import edu.stanford.nlp.pipeline.Annotation
 import edu.stanford.nlp.pipeline.CoreDocument
 import edu.stanford.nlp.pipeline.StanfordCoreNLP
 import edu.stanford.nlp.tagger.maxent.MaxentTagger
-import edu.stanford.nlp.trees.GrammaticalStructureFactory
-import edu.stanford.nlp.trees.Tree
+import edu.stanford.nlp.trees.*
 import edu.stanford.nlp.util.CoreMap
-import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.launch
-import kotlinx.coroutines.runBlocking
-import kotlinx.coroutines.yield
+import kotlinx.coroutines.*
 import org.ejml.simple.SimpleMatrix
+import java.util.*
 import java.util.concurrent.TimeUnit
 import java.util.regex.Pattern
@@ -57,6 +56,36 @@ public class Datahandler {
     private var sentences1HashMap: HashMap<String, List<CoreMap>> = HashMap()
     private var sentencesSentimentHashMap: HashMap<String, List<CoreMap>> = HashMap()
     private var trees1HashMap: HashMap<String, java.util.ArrayList<Tree>> = HashMap()
+    private var grammaticalStructureHashMap: HashMap<String, java.util.ArrayList<GrammaticalStructure>> =
+        HashMap()
+    private var typedDependenciesHashMap: HashMap<String, java.util.ArrayList<TypedDependency>> =
+        HashMap()
+    private var rnnCoreAnnotationsPredictedHashMap: HashMap<String, java.util.ArrayList<Int>> = HashMap()
+    private var simpleMatricesHashMap: HashMap<String, java.util.ArrayList<SimpleMatrix>> = HashMap()
+    private var simpleMatricesNodevectorsHashMap: HashMap<String, java.util.ArrayList<SimpleMatrix>> = HashMap()
+    private var listHashMap: HashMap<String, MutableList<Any?>> = HashMap()
+    private var longestHashMap: HashMap<String, Int> = HashMap()
+    private var sentimentHashMap: HashMap<String, Int> = HashMap()
+    private var imwesHashMap: HashMap<String, List<IMWE<IToken>>> = HashMap()
+    private var InflectedCounterNegativeHashMap: HashMap<String, Int> = HashMap()
+    private var InflectedCounterPositiveHashMap: HashMap<String, Int> = HashMap()
+    private var tokenEntryHashMap: HashMap<String, ArrayList<String>> = HashMap()
+    private var MarkedContinuousCounterHashMap: HashMap<String, Int> = HashMap()
+    private var UnmarkedPatternCounterHashMap: HashMap<String, Int> = HashMap()
+    private var strTokensIpartFormHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
+    private var tokenFormsHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
+    private var strTokenEntryGetPOSHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
+    private var intTokenEntyCountsHashMap: HashMap<String, java.util.ArrayList<Int>> = HashMap()
+    private var ITokenTagsHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
+    private var strTokenStemsHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
+    private var AnotatorcounterHashMap: HashMap<String, Int> = HashMap()
+    private var TokensCounterHashMap: HashMap<String, Int> = HashMap()
+    private var entityTokenTagsHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
+    private var nerEntitiesHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
+    private var nerEntitiesTypeHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
+    private var stopWordTokenHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
+    private var stopWordLemmaHashMap: HashMap<String, java.util.ArrayList<String>> = HashMap()
+    private var PairCounterHashMap: HashMap<String, Int> = HashMap()
 
     constructor() {
         stopwatch = Stopwatch.createUnstarted()
@@ -131,7 +160,9 @@ public class Datahandler {
         }
         stringCache.sortWith(Comparator.comparingInt(String::length).reversed());
         System.out.println("pre InsertMYSQLStrings")
-        DataMapper.InsertMYSQLStrings(stringCache)
+        val arrayList = java.util.ArrayList<String>(stringCache)
+        DataMapper.InsertMYSQLStrings(arrayList)
+        DataMapper.checkStringsToDelete();
     }
 }
 
@@ -191,8 +222,34 @@ public class Datahandler {
         var sentencesSentimentF: List<CoreMap>? = null
         var coreMaps1: List<CoreMap> = jmweAnnotationF.get(CoreAnnotations.SentencesAnnotation::class.java)
         var treesF: java.util.ArrayList<Tree>? = null
-
-
+        var grammaticalStructuresF: ArrayList<GrammaticalStructure>? = null
+        var typedDependenciesF: java.util.ArrayList<TypedDependency>? = null
+        var rnnCoreAnnotationsPredictedF: java.util.ArrayList<Int>? = null
+        var simpleMatricesF: java.util.ArrayList<SimpleMatrix>? = null
+        var simpleMatricesNodevectorsF: java.util.ArrayList<SimpleMatrix>? = null
+        var listF: MutableList<Any?>? = null
+        var longestF: Int? = null
+        var sentimentLongestF: Int? = null
+        var imwesF: List<IMWE<IToken>>? = null
+        var InflectedCounterNegativeF: Int? = null
+        var InflectedCounterPositiveF: Int? = null
+        var tokenEntryF: ArrayList<String>? = null
+        var MarkedContinuousCounterF: Int? = null
+        var UnmarkedPatternCounterF: Int? = null
+        var strTokensIpartFormF: ArrayList<String>? = null
+        var tokenFormsF: java.util.ArrayList<String>? = null
+        var strTokenEntryGetPOSF: ArrayList<String>? = null
+        var intTokenEntyCountsF: java.util.ArrayList<Int>? = null
+        var ITokenTagsF: ArrayList<String>? = null
+        var strTokenStemsF: java.util.ArrayList<String>? = null
+        var AnotatorcounterF: Int? = null
+        var TokensCounterF: Int? = null
+        var entityTokenTagsF: java.util.ArrayList<String>? = null
+        var nerEntitiesF: java.util.ArrayList<String>? = null
+        var nerEntitiesTypeF: java.util.ArrayList<String>? = null
+        var stopWordTokenF: java.util.ArrayList<String>? = null
+        var stopWordLemmaF: java.util.ArrayList<String>? = null
+        var PairCounterF: Int? = null
         for (str1 in values_copy) {
             if (strF != str1) {
                 val annotation2 = pipelineSentimentAnnotationCache.getOrDefault(str1, null)
@@ -219,63 +276,263 @@ public class Datahandler {
                 val sentenceSentiment1: List<CoreMap>? = sentencesSentimentHashMap.getOrDefault(str1, null)
                 val trees1 = trees1HashMap.getOrDefault(str1, null)
                 var coreMaps2: List<CoreMap> = listOf()
+                val grammaticalStructures1 = grammaticalStructureHashMap.getOrDefault(
+                    str1, null)
                 if (jmweAnnotation != null) {
                     coreMaps2 = jmweAnnotation.get(CoreAnnotations.SentencesAnnotation::class.java)
                 }
+                val typedDependencies1 = typedDependenciesHashMap.getOrDefault(str1, null)
+                val rnnCoreAnnotationsPredicted1 = rnnCoreAnnotationsPredictedHashMap.getOrDefault(str1, null)
+                val simpleMatrices1 = simpleMatricesHashMap.getOrDefault(str1, null);
+                val simpleMatricesNodevectors1 = simpleMatricesNodevectorsHashMap.getOrDefault(str1, null);
+                val list1 = listHashMap.getOrDefault(str1, null);
+                val longest1 = longestHashMap.getOrDefault(str1, null);
+                val sentimentLongest1 = sentimentHashMap.getOrDefault(str1, null);
+                val imwes1 = imwesHashMap.getOrDefault(str1, null);
+                val InflectedCounterNegative1 = InflectedCounterNegativeHashMap.getOrDefault(str1, null);
+                val InflectedCounterPositive1 = InflectedCounterPositiveHashMap.getOrDefault(str1, null)
+                val tokenEntry1 = tokenEntryHashMap.getOrDefault(str1, null)
+                val MarkedContinuousCounter1 = MarkedContinuousCounterHashMap.getOrDefault(str1, null)
+                val UnmarkedPatternCounter1 = UnmarkedPatternCounterHashMap.getOrDefault(str1, null)
+                val strTokensIpartForm1 = strTokensIpartFormHashMap.getOrDefault(str1, null);
+                val tokenForms1 = tokenFormsHashMap.getOrDefault(str1, null);
+                val strTokenEntryGetPOS1 = strTokenEntryGetPOSHashMap.getOrDefault(str1, null)
+                val intTokenEntyCounts1 = intTokenEntyCountsHashMap.getOrDefault(str1, null);
+                val ITokenTags1 = ITokenTagsHashMap.getOrDefault(str1, null);
+                val strTokenStems1 = strTokenStemsHashMap.getOrDefault(str1, null);
+                val Anotatorcounter1 = AnotatorcounterHashMap.getOrDefault(str1, null);
+                val TokensCounter1 = TokensCounterHashMap.getOrDefault(str1, null);
+                val entityTokenTags1 = entityTokenTagsHashMap.getOrDefault(str1, null);
+                val nerEntities1 = nerEntitiesHashMap.getOrDefault(str1, null);
+                val nerEntitiesType1 = nerEntitiesTypeHashMap.getOrDefault(str1, null);
+                val stopWordToken1 = stopWordTokenHashMap.getOrDefault(str1, null);
+                val stopWordLemma1 = stopWordLemmaHashMap.getOrDefault(str1, null);
+                val PairCounter1 = PairCounterHashMap.getOrDefault(str1, null);
 
                 var SMX = SentimentAnalyzerTest(strF, str1, SimilarityMatrix(strF, str1),
                     coreMaps1, coreMaps2, strAnno,
                     pipelineAnnotationCache[str1], strAnnoSentiment,
                     pipelineSentimentAnnotationCache[str1], coreDocument, coreDocumentAnnotationCache[str1],
                     tagger, gsf, classifier, tokenizeCounting, tokenizeCountingF,
                     taggedWordListF, taggedWordList1, retrieveTGWListF, retrieveTGWList1,
-                    sentencesF, sentence1, sentencesSentimentF, sentenceSentiment1, treesF, trees1)
+                    sentencesF, sentence1, sentencesSentimentF, sentenceSentiment1, treesF, trees1,
+                    grammaticalStructuresF, grammaticalStructures1, typedDependenciesF,
+                    typedDependencies1, rnnCoreAnnotationsPredictedF, rnnCoreAnnotationsPredicted1,
+                    simpleMatricesF, simpleMatrices1, simpleMatricesNodevectorsF, simpleMatricesNodevectors1,
+                    listF, list1, longestF, longest1, sentimentLongestF, sentimentLongest1, imwesF,
+                    imwes1, InflectedCounterNegativeF, InflectedCounterNegative1, InflectedCounterPositiveF,
+                    InflectedCounterPositive1, tokenEntryF, tokenEntry1, MarkedContinuousCounterF,
+                    MarkedContinuousCounter1, UnmarkedPatternCounterF, UnmarkedPatternCounter1,
+                    strTokensIpartFormF, strTokensIpartForm1, tokenFormsF, tokenForms1,
+                    strTokenEntryGetPOSF, strTokenEntryGetPOS1, intTokenEntyCountsF,
+                    intTokenEntyCounts1, ITokenTagsF, ITokenTags1, strTokenStemsF, strTokenStems1,
+                    AnotatorcounterF, Anotatorcounter1, TokensCounterF, TokensCounter1,
+                    entityTokenTagsF, entityTokenTags1, nerEntitiesF, nerEntities1, nerEntitiesTypeF,
+                    nerEntitiesType1, stopWordTokenF, stopWordToken1, stopWordLemmaF, stopWordLemma1,
+                    PairCounterF, PairCounter1)
                 if (tokenizeCounting == null) {
-                    val tokenizeCounting1 = SMX.getTokenizeCounting();
-                    tokenizeCountingHashMap.put(str1, tokenizeCounting1)
+                    tokenizeCountingHashMap.put(str1, SMX.getTokenizeCounting())
                 }
                 if (taggedWordList1 == null) {
-                    val taggedWordList1Local = SMX.getTaggedWordList1();
-                    taggedWordListHashMap.put(str1, taggedWordList1Local)
+                    taggedWordListHashMap.put(str1, SMX.getTaggedWordList1())
                 }
                 if (tokenizeCountingF == null) {
-                    val tokenizeCountingF1 = SMX.getTokenizeCountingF();
-                    tokenizeCountingF = tokenizeCountingF1;
+                    tokenizeCountingF = SMX.getTokenizeCountingF();
                 }
                 if (taggedWordListF == null) {
-                    val taggedWordListF1 = SMX.getTaggedWordListF();
-                    taggedWordListF = taggedWordListF1;
+                    taggedWordListF = SMX.getTaggedWordListF();
                 }
                 if (retrieveTGWListF == null) {
-                    val retrieveTGWListF1 = SMX.getRetrieveTGWListF();
-                    retrieveTGWListF = retrieveTGWListF1;
+                    retrieveTGWListF = SMX.getRetrieveTGWListF();
                 }
                 if (retrieveTGWList1 == null) {
-                    val retrieveTGWList11 = SMX.getRetrieveTGWList1();
-                    retrieveTGWListHashMap.put(str1, retrieveTGWList11);
+                    retrieveTGWListHashMap.put(str1, SMX.getRetrieveTGWList1());
                 }
                 if (sentencesF == null) {
-                    val sentencesF1 = SMX.getSentencesF();
-                    sentencesF = sentencesF1;
+                    sentencesF = SMX.getSentencesF();
                 }
                 if (sentence1 == null) {
-                    val sentences1 = SMX.getSentences1();
-                    sentences1HashMap.put(str1, sentences1)
+                    sentences1HashMap.put(str1, SMX.getSentences1())
                 }
                 if (sentencesSentimentF == null) {
-                    val sentencesSentimentF1 = SMX.getSentencesSentimentF();
-                    sentencesSentimentF = sentencesSentimentF1;
+                    sentencesSentimentF = SMX.getSentencesSentimentF();
                 }
                 if (sentenceSentiment1 == null) {
-                    val sentencesSentiment1 = SMX.getSentencesSentiment1();
-                    sentencesSentimentHashMap.put(str1, sentencesSentiment1);
+                    sentencesSentimentHashMap.put(str1, SMX.getSentencesSentiment1());
                 }
                 if (treesF == null) {
-                    val treesF1 = SMX.getTreesF();
-                    treesF = treesF1;
+                    treesF = SMX.getTreesF();
                 }
                 if (trees1 == null) {
-                    val trees11 = SMX.getTrees1();
-                    trees1HashMap.put(str1, trees11)
+                    trees1HashMap.put(str1, SMX.getTrees1())
                 }
+                if (grammaticalStructuresF == null) {
+                    grammaticalStructuresF = SMX.getGrammaticalStructuresF();
+                }
+                if (grammaticalStructures1 == null) {
+                    grammaticalStructureHashMap.put(str1, SMX.getGrammaticalStructures1())
+                }
+                if (typedDependenciesF == null) {
+                    typedDependenciesF = SMX.getTypedDependenciesF();
+                }
+                if (typedDependencies1 == null) {
+                    typedDependenciesHashMap.put(str1, SMX.getTypedDependencies1())
+                }
+                if (rnnCoreAnnotationsPredictedF == null) {
+                    rnnCoreAnnotationsPredictedF = SMX.getRnnCoreAnnotationsPredictedF()
+                }
+                if (rnnCoreAnnotationsPredicted1 == null) {
+                    rnnCoreAnnotationsPredictedHashMap.put(str1, SMX.getRnnCoreAnnotationsPredicted1())
+                }
+                if (simpleMatricesF == null) {
+                    simpleMatricesF = SMX.getSimpleMatricesF();
+                }
+                if (simpleMatrices1 == null) {
+                    simpleMatricesHashMap.put(str1, SMX.getSimpleMatrices1());
+                }
+                if (simpleMatricesNodevectorsF == null) {
+                    simpleMatricesNodevectorsF = SMX.getSimpleMatricesNodevectorsF();
+                }
+                if (simpleMatricesNodevectors1 == null) {
+                    simpleMatricesNodevectorsHashMap.put(str1, SMX.getSimpleMatricesNodevectors1());
+                }
+                if (listF == null) {
+                    listF = SMX.getListF();
+                }
+                if (list1 == null) {
+                    listHashMap.put(str1, SMX.getList1());
+                }
+                if (longestF == null) {
+                    longestF = SMX.getLongestF();
+                }
+                if (longest1 == null) {
+                    longestHashMap.put(str1, SMX.getLongest1());
+                }
+                if (sentimentLongestF == null) {
+                    sentimentLongestF = SMX.getSentimentLongestF();
+                }
+                if (sentimentLongest1 == null) {
+                    sentimentHashMap.put(str1, SMX.getSentimentLongest1());
+                }
+                if (imwesF == null) {
+                    imwesF = SMX.getImwesF();
+                }
+                if (imwes1 == null) {
+                    imwesHashMap.put(str1, SMX.getImwes1());
+                }
+                if (InflectedCounterNegativeF == null) {
+                    InflectedCounterNegativeF = SMX.getInflectedCounterNegativeF();
+                }
+                if (InflectedCounterNegative1 == null) {
+                    InflectedCounterNegativeHashMap.put(str1, SMX.getInflectedCounterNegative1());
+                }
+                if (InflectedCounterPositiveF == null) {
+                    InflectedCounterPositiveF = SMX.getInflectedCounterPositiveF();
+                }
+                if (InflectedCounterPositive1 == null) {
+                    InflectedCounterPositiveHashMap.put(str1, SMX.getInflectedCounterPositive1());
+                }
+                if (tokenEntryF == null) {
+                    tokenEntryF = SMX.getTokenEntryF();
+                }
+                if (tokenEntry1 == null) {
+                    tokenEntryHashMap.put(str1, SMX.getTokenEntry1())
+                }
+                if (MarkedContinuousCounterF == null) {
+                    MarkedContinuousCounterF = SMX.getMarkedContinuousCounterF();
+                }
+                if (MarkedContinuousCounter1 == null) {
+                    MarkedContinuousCounterHashMap.put(str1, SMX.getMarkedContinuousCounter1());
+                }
+                if (UnmarkedPatternCounterF == null) {
+                    UnmarkedPatternCounterF = SMX.getUnmarkedPatternCounterF();
+                }
+                if (UnmarkedPatternCounter1 == null) {
+                    UnmarkedPatternCounterHashMap.put(str1, SMX.getUnmarkedPatternCounter1());
+                }
+                if (strTokensIpartFormF == null) {
+                    strTokensIpartFormF = SMX.getStrTokensIpartFormF();
+                }
+                if (strTokensIpartForm1 == null) {
+                    strTokensIpartFormHashMap.put(str1, SMX.getStrTokensIpartForm1());
+                }
+                if (tokenFormsF == null) {
+                    tokenFormsF = SMX.getTokenFormsF();
+                }
+                if (tokenForms1 == null) {
+                    tokenFormsHashMap.put(str1, SMX.getTokenForms1());
+                }
+                if (strTokenEntryGetPOSF == null) {
+                    strTokenEntryGetPOSF = SMX.getStrTokenEntryGetPOSF();
+                }
+                if (strTokenEntryGetPOS1 == null) {
+                    strTokenEntryGetPOSHashMap.put(str1, SMX.getStrTokenEntryGetPOS1())
+                }
+                if (intTokenEntyCountsF == null) {
+                    intTokenEntyCountsF = SMX.getIntTokenEntyCountsF();
+                }
+                if (intTokenEntyCounts1 == null) {
+                    intTokenEntyCountsHashMap.put(str1, SMX.getIntTokenEntyCounts1());
+                }
+                if (ITokenTagsF == null) {
+                    ITokenTagsF = SMX.getITokenTagsF();
+                }
+                if (ITokenTags1 == null) {
+                    ITokenTagsHashMap.put(str1, SMX.getITokenTags1());
+                }
+                if (strTokenStemsF == null) {
+                    strTokenStemsF = SMX.getStrTokenStemsF();
+                }
+                if (strTokenStems1 == null) {
+                    strTokenStemsHashMap.put(str1, SMX.getStrTokenStems1());
+                }
+                if (AnotatorcounterF == null) {
+                    AnotatorcounterF = SMX.getAnotatorcounterF();
+                }
+                if (Anotatorcounter1 == null) {
+                    AnotatorcounterHashMap.put(str1, SMX.getAnotatorcounter1());
+                }
+                if (TokensCounterF == null) {
+                    TokensCounterF = SMX.getTokensCounterF();
+                }
+                if (TokensCounter1 == null) {
+                    TokensCounterHashMap.put(str1, SMX.getTokensCounter1());
+                }
+                if (entityTokenTagsF == null) {
+                    entityTokenTagsF = SMX.getEntityTokenTagsF();
+                }
+                if (entityTokenTags1 == null) {
+                    entityTokenTagsHashMap.put(str1, SMX.getEntityTokenTags1());
+                }
+                if (nerEntitiesF == null) {
+                    nerEntitiesF = SMX.getNerEntitiesF();
+                }
+                if (nerEntities1 == null) {
+                    nerEntitiesHashMap.put(str1, SMX.getNerEntities1());
+                }
+                if (nerEntitiesTypeF == null) {
+                    nerEntitiesTypeF = SMX.getNerEntitiesTypeF();
+                }
+                if (nerEntitiesType1 == null) {
+                    nerEntitiesTypeHashMap.put(str1, SMX.getNerEntitiesType1());
+                }
+                if (stopWordTokenF == null) {
+                    stopWordTokenF = SMX.getStopWordTokenF();
+                }
+                if (stopWordToken1 == null) {
+                    stopWordTokenHashMap.put(str1, SMX.getStopWordToken1());
+                }
+                if (stopWordLemmaF == null) {
+                    stopWordLemmaF = SMX.getStopWordLemmaF();
+                }
+                if (stopWordLemma1 == null) {
+                    stopWordLemmaHashMap.put(str1, SMX.getStopWordLemma1());
+                }
+                if (PairCounterF == null) {
+                    PairCounterF = SMX.getPairCounterF();
+                }
+                if (PairCounter1 == null) {
+                    PairCounterHashMap.put(str1, SMX.getPairCounter1());
+                }
 
                 var getSMX: SimilarityMatrix = SMX.callSMX()
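Editor's note: every one of the new per-sentence caches above follows the same look-up/compute/store shape — getOrDefault before the analyzer runs, a null check and put afterwards. For cases where the value can be produced directly from the key, Kotlin's getOrPut expresses the whole block in one call. A minimal sketch; orCompute is a hypothetical helper, and as in the loop above the SMX getters must already be available when the lambda runs:

// Sketch of the cache-on-miss pattern behind each
// "if (x1 == null) { xHashMap.put(str1, SMX.getX1()) }" block.
fun <V> HashMap<String, V>.orCompute(key: String, compute: () -> V): V =
    getOrPut(key) { compute() }

// e.g. instead of a separate null check plus put:
// val trees1 = trees1HashMap.orCompute(str1) { SMX.getTrees1() }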
@@ -292,17 +549,25 @@ public class Datahandler {
         }
         val randomLenghtPermit = strF.length * (Math.random() * Math.random() * Math.random() * (Math.random() * 10))
         Collections.reverse(concurrentRelations)
+        val mysqlUpdateLastUsed: ArrayList<String> = ArrayList()
         if (!concurrentRelations.isEmpty()) {
             for (secondaryRelation in concurrentRelations) {
                 if (SB.toString().length > randomLenghtPermit && !SB.toString().isEmpty()) {
                     break
                 }
                 SB.append(secondaryRelation).append(" ")
+                mysqlUpdateLastUsed.add(secondaryRelation)
             }
         }
         if (SB.toString().isEmpty()) {
             return "failure, preventing stuckness"
         }
+        runBlocking {
+            CoroutineScope(launch(Dispatchers.IO) {
+                DataMapper.updateLastUsed(mysqlUpdateLastUsed);
+                yield()
+            })
+        }
         return SB.toString()
     }
 
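Editor's note on the new coroutine block: runBlocking waits for its child jobs, so the updateLastUsed write still blocks the reply path, and wrapping the launch's Job in CoroutineScope(...) builds a scope that is never used. If the intent was a fire-and-forget write, a long-lived scope gives exactly that. A sketch, assuming it is acceptable for the UPDATE to land after the reply string is returned; ioScope and updateLastUsedAsync are illustrative names, not part of the commit:

import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.SupervisorJob
import kotlinx.coroutines.launch

// One application-wide scope for background DB writes.
val ioScope = CoroutineScope(SupervisorJob() + Dispatchers.IO)

fun updateLastUsedAsync(sentences: java.util.ArrayList<String>) {
    ioScope.launch {
        // runs on the IO dispatcher; the caller does not wait for it
        DataMapper.updateLastUsed(sentences)
    }
}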
File diff suppressed because it is too large.
@@ -1,17 +1,3 @@
-/*
-* To change this license header, choose License Headers in Project Properties.
-* To change this template file, choose Tools | Templates
-* and open the template in the editor.
-
-ps ax | grep EventNotfierDiscordBot-1.0
-kill $pid (number)
-
-nohup screen -d -m -S nonroot java -Xmx6048M -jar /home/javatests/ArtificialAutism-1.0.jar
-nohup screen -d -m -S nonroot java -Xmx6800M -jar /home/javatests/ArtificialAutism-1.0.jar
-
-screen -ls (number1)
-screen -X -S (number1) quit
-*/
 package PresentationLayer;
 
 import DataLayer.settings;
@@ -117,5 +103,5 @@ public class DiscordHandler {
             FunctionLayer.DoStuff.doStuff(event, usernameBot, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
         });
         gateway.onDisconnect().block();
-    }
+    } //3.1.1 discord4j version
 }
@@ -1,11 +1,13 @@
 import discord
 from discord.ext import commands
 from discord.ext.tasks import loop
+from discord import HTTPException
 from settings import token
+import time
 import datetime
 
 client = discord.Client()
-ignore_list = []
+ignore_list = [846756271910158378]
 #shit api. got switched like 1 year ago so like every single discussion about it is outdated.
 
 def get_suggestion_type(msg):
@@ -35,7 +37,7 @@ async def on_message(message):
     if message.author.bot:
         return
     if message.author.id in ignore_list:
-        msg = f'{message.author.name} Your suggestion was ignored because you made too many shitty suggestions already. Do not post in this channel anymore.'
+        msg = f'{message.author.name} Your suggestion was ignored because you made too many shitty suggestions already. Do not post in this channel anymore.'
         await message.channel.send(msg)  # send the notice; send() with no content raises an error
         return
     if message.channel.name == 'suggestion-box': #suggestion-box
@@ -44,12 +46,13 @@ async def on_message(message):
         suggestion_type, message_content = get_suggestion_type(message_content)
         now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
         send_msg = f"""```DISCORD USER ID: {message.author.id}\nUSERNAME: {message.author}\nTYPE: {suggestion_type}\nTIME: {now}\nSTATUS: No admin interested yet (mark with an emote to prevent suggestion deletion within 48 hours)\nSUGGESTION: {message_content}```"""
-        send_channel_msg = f"""{message.author} your suggestion was sent to admins as type: {suggestion_type}. Specify suggestion types by appending your message with one of the following commands: !ze !zr !mg !forum !discord !meta"""
+        send_channel_msg = f"""{message.author} your suggestion was sent to admins as type: {suggestion_type}. Specify suggestion types by appending your message with one of the following commands: !ze !zr !mg !forum !discord !meta.\nSpamming the suggestion channel with useless crap will lead to a ban."""
         await message.channel.send(send_channel_msg)
         await channel.send(send_msg)
 
 @loop(seconds = 10)
 async def check_suggestions_to_delete():
+    global client
     for channel in client.get_all_channels():
         if channel.name == 'suggestion-admin':
             msgs_history = channel.history()
@@ -79,7 +82,13 @@ async def check_suggestions_to_delete():
                     if user.name not in user_reactions:
                         user_reactions.append(user.name)
                 final_msg = f'{first_part}STATUS: Admins interested in topic:{user_reactions}\n{suggestion}{last_part}'
-                await msg.edit(content=final_msg)
+                try:
+                    await msg.edit(content=final_msg)
+                except HTTPException:  # rate limited by the API
+                    time.sleep(300)  # note: this blocks the whole event loop; await asyncio.sleep(300) would not
+                    client = discord.Client()
 
 check_suggestions_to_delete.start()
 client.run(token)
 
+#the entire syntax that is visible from the python3 terminal is not very well displayed in the docs of discord.py and nobody has the current syntax in any suggestions anywhere on the internet. almost every single thing is out of date reee
 if __name__ == '__main__':