un-parallelising some functions, changing the futures retriever, and moving some thresholds and logic around
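Two of the changes are mechanical: several parallelStream() traversals become plain forEach(), and thresholds are retuned (for example the RelationCounter cutoff goes from 0 to 500 and the message capacity from 50 to 500). The futures-retriever change replaces the old pattern of spinning up a fresh single-thread ExecutorService per comparison and collecting Future objects in a list with a shared cached thread pool drained through an ExecutorCompletionService: submit the callables, then take()/get() the results as they complete. A minimal, self-contained sketch of that pattern follows; the task type and values are illustrative, not the project's SentimentAnalyzerTest callables.

// Sketch only: the completion-service retrieval pattern this commit adopts.
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class CompletionServiceSketch {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newCachedThreadPool();
        CompletionService<Integer> ecs = new ExecutorCompletionService<>(pool);
        int submitted = 0;
        for (int i = 1; i <= 10; i++) {
            final int n = i;
            Callable<Integer> worker = () -> n * n; // stand-in for a scoring callable
            ecs.submit(worker);
            submitted++;
        }
        List<Integer> results = new ArrayList<>();
        for (int i = 0; i < submitted; i++) {
            try {
                results.add(ecs.take().get()); // take() blocks until the next task finishes
            } catch (ExecutionException ex) {
                // a failed task is skipped, mirroring the catch-and-continue in the diff
            }
        }
        pool.shutdown();
        System.out.println(results);
    }
}

Compared with one single-thread executor per task, the shared pool reuses threads and take() hands results back in completion order rather than submission order.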
parent 6d4ab15107
commit ac56f386a3
.gitignore (vendored, new file, 2 lines added)
@@ -0,0 +1,2 @@
ArtificialAutism/src/main/java/DataLayer/settings.java
@@ -10,6 +10,7 @@ import java.sql.SQLException;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import org.apache.commons.dbcp2.BasicDataSource;
+import DataLayer.settings;
 
 /**
  *
@@ -20,9 +21,9 @@ public class DBCPDataSource {
     static {
         try {
             ds.setDriver(new com.mysql.cj.jdbc.Driver());
-            ds.setUrl("jdbc:mysql://163.172.84.14:3306/ArtificialAutism?useLegacyDatetimeCode=false&serverTimezone=UTC");
-            ds.setUsername("root");
-            ds.setPassword("FJEF23423u5r9BFhBFEBFIB234h29");
+            ds.setUrl(settings.url);
+            ds.setUsername(settings.username);
+            ds.setPassword(settings.password);
             ds.setMaxTotal(-1);
             ds.setMinIdle(5);
             ds.setMaxIdle(-1);
@@ -10,6 +10,7 @@ import java.sql.SQLException;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import org.apache.commons.dbcp2.BasicDataSource;
+import DataLayer.settings;
 
 /**
  *
@@ -21,9 +22,9 @@ public class DBCPDataSourceHLstats {
     static {
         try {
             ds.setDriver(new com.mysql.cj.jdbc.Driver());
-            ds.setUrl("jdbc:mysql://151.80.230.149:3306/unloze_stats?useLegacyDatetimeCode=false&serverTimezone=UTC");
-            ds.setUsername("unloze_stats");
-            ds.setPassword("R8J8E9Fzmcc7ZfDAGAk7");
+            ds.setUrl(settings.hlURL);
+            ds.setUsername(settings.hlusername);
+            ds.setPassword(settings.hlpassword);
             ds.setMaxTotal(-1);
             ds.setMinIdle(5);
             ds.setMaxIdle(-1);
@@ -34,8 +34,7 @@ public class DataMapper {
         ResultSet l_rsSearch = null;
         try {
             l_cCon = DBCPDataSource.getConnection();
-            String l_sSQL = "CREATE TABLE IF NOT EXISTS `ArtificialAutism`.`Sentences` (`Strings` VARCHAR(256) NOT NULL, PRIMARY KEY (`Strings`))\n"
-                    + "ENGINE = InnoDB;";
+            String l_sSQL = "CREATE TABLE IF NOT EXISTS `ArtificialAutism`.`Sentences` (`Strings` text NOT NULL)";
             l_pStatement = l_cCon.prepareStatement(l_sSQL);
             l_pStatement.execute();
         } catch (SQLException ex) {
@@ -53,9 +52,7 @@ public class DataMapper {
         try {
             l_cCon = DBCPDataSource.getConnection();
             String l_sSQL = "SELECT * FROM `Sentences`";
-            l_pStatement = l_cCon.prepareStatement(l_sSQL, java.sql.ResultSet.TYPE_FORWARD_ONLY,
-                    java.sql.ResultSet.CONCUR_READ_ONLY);
-            l_pStatement.setFetchSize(Integer.MIN_VALUE);
+            l_pStatement = l_cCon.prepareStatement(l_sSQL);
             l_rsSearch = l_pStatement.executeQuery();
             int ij = 0;
             while (l_rsSearch.next()) {
@@ -77,9 +74,7 @@ public class DataMapper {
         String l_sSQL = "INSERT IGNORE `Sentences` (`Strings`) VALUES (?)";
         try {
             l_cCon = DBCPDataSource.getConnection();
-            l_pStatement = l_cCon.prepareStatement(l_sSQL, java.sql.ResultSet.TYPE_FORWARD_ONLY,
-                    java.sql.ResultSet.CONCUR_READ_ONLY);
-            l_pStatement.setFetchSize(Integer.MIN_VALUE);
+            l_pStatement = l_cCon.prepareStatement(l_sSQL);
             for (String str1 : str.values()) {
                 //System.out.println("adding str1: " + str1 + "\n");
                 l_pStatement.setString(1, str1);
@@ -97,8 +92,7 @@ public class DataMapper {
         ConcurrentMap<Integer, String> hlStatsMessages = new MapMaker().concurrencyLevel(2).makeMap();
         try (Connection l_cCon = DBCPDataSourceHLstats.getConnection()) {
             String l_sSQL = "SELECT message FROM `hlstats_Events_Chat`";
-            try (PreparedStatement l_pStatement = l_cCon.prepareStatement(l_sSQL, java.sql.ResultSet.TYPE_FORWARD_ONLY,
-                    java.sql.ResultSet.CONCUR_READ_ONLY)) {
+            try (PreparedStatement l_pStatement = l_cCon.prepareStatement(l_sSQL)) {
                 try (ResultSet l_rsSearch = l_pStatement.executeQuery()) {
                     while (l_rsSearch.next()) {
                         hlStatsMessages.put(hlStatsMessages.size() + 1, l_rsSearch.getString(1));
@ -35,9 +35,11 @@ import java.util.Map.Entry;
|
||||
import java.util.Properties;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.CompletionService;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.ExecutorCompletionService;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.ForkJoinPool;
|
||||
@ -71,7 +73,6 @@ public class Datahandler {
|
||||
private static ConcurrentMap<String, Annotation> pipelineSentimentAnnotationCache;
|
||||
private static ConcurrentMap<String, Annotation> jmweAnnotationCache;
|
||||
private static ConcurrentMap<String, CoreDocument> coreDocumentAnnotationCache;
|
||||
private static ConcurrentMap<String, Integer> conversationUserMatchMap;
|
||||
private static ConcurrentMap<String, SentimentValueCache> sentimentCachingMap = new MapMaker().concurrencyLevel(6).makeMap();
|
||||
private LinkedHashMap<String, LinkedHashMap<String, Double>> lHMSMX = new LinkedHashMap();
|
||||
private final Stopwatch stopwatch;
|
||||
@ -102,7 +103,6 @@ public class Datahandler {
|
||||
this.pipelineAnnotationCache = new MapMaker().concurrencyLevel(4).makeMap();
|
||||
this.pipelineSentimentAnnotationCache = new MapMaker().concurrencyLevel(4).makeMap();
|
||||
this.coreDocumentAnnotationCache = new MapMaker().concurrencyLevel(5).makeMap();
|
||||
this.conversationUserMatchMap = new MapMaker().concurrencyLevel(4).makeMap();
|
||||
}
|
||||
|
||||
public static StanfordCoreNLP getPipeline() {
|
||||
@ -160,7 +160,7 @@ public class Datahandler {
|
||||
try {
|
||||
cdl.await();
|
||||
} catch (InterruptedException ex) {
|
||||
System.out.println("cdl await interrupted: " + ex.getLocalizedMessage() + "\n");
|
||||
//System.out.println("cdl await interrupted: " + ex.getLocalizedMessage() + "\n");
|
||||
}
|
||||
System.out.println("finished shiftReduceParserInitiate\n");
|
||||
}
|
||||
@ -221,7 +221,7 @@ public class Datahandler {
|
||||
hlStatsMessages.put(str, hlStatsMessages.size());
|
||||
}
|
||||
}
|
||||
int capacity = 50;
|
||||
int capacity = 500;
|
||||
hlStatsMessages.keySet().forEach(str -> {
|
||||
if (!str.startsWith("!") && MessageResponseHandler.getStr().values().size() < capacity) {
|
||||
String orElse = strCacheLocal.values().parallelStream().filter(e -> e.equals(str)).findAny().orElse(null);
|
||||
@ -273,36 +273,38 @@ public class Datahandler {
|
||||
|
||||
private ConcurrentMap<Integer, String> futuresReturnOverallEvaluation(List<SimilarityMatrix> similarityMatrixes,
|
||||
ConcurrentMap<Integer, String> strmapreturn) {
|
||||
String newPrimary = similarityMatrixes.get(0).getPrimaryString();
|
||||
int evaluationCap = 50000;
|
||||
boolean hitCap = false;
|
||||
int iterator = 0;
|
||||
for (SimilarityMatrix SMX : similarityMatrixes) {
|
||||
if (!newPrimary.equals(SMX.getPrimaryString())) {
|
||||
newPrimary = SMX.getPrimaryString();
|
||||
strmapreturn = addSMXToMapReturn(strmapreturn, SMX);
|
||||
hitCap = false;
|
||||
}
|
||||
if (!hitCap) {
|
||||
final Double scoreRelationNewMsgToRecentMsg = SMX.getDistance();
|
||||
RelationCounter += scoreRelationNewMsgToRecentMsg > 0 ? scoreRelationNewMsgToRecentMsg * 2 : scoreRelationNewMsgToRecentMsg;
|
||||
if (RelationCounter > evaluationCap) {
|
||||
if (!similarityMatrixes.isEmpty()) {
|
||||
String newPrimary = similarityMatrixes.get(0).getPrimaryString();
|
||||
int evaluationCap = 50000;
|
||||
boolean hitCap = false;
|
||||
int iterator = 0;
|
||||
for (SimilarityMatrix SMX : similarityMatrixes) {
|
||||
if (!newPrimary.equals(SMX.getPrimaryString())) {
|
||||
newPrimary = SMX.getPrimaryString();
|
||||
strmapreturn = addSMXToMapReturn(strmapreturn, SMX);
|
||||
hitCap = true;
|
||||
} else if (RelationCounter < evaluationCap * -1) {
|
||||
addSMXToMapReturn(strmapreturn, SMX);
|
||||
hitCap = true;
|
||||
hitCap = false;
|
||||
}
|
||||
if (!hitCap) {
|
||||
final Double scoreRelationNewMsgToRecentMsg = SMX.getDistance();
|
||||
RelationCounter += scoreRelationNewMsgToRecentMsg > 500 ? scoreRelationNewMsgToRecentMsg * 2 : scoreRelationNewMsgToRecentMsg;
|
||||
if (RelationCounter > evaluationCap) {
|
||||
strmapreturn = addSMXToMapReturn(strmapreturn, SMX);
|
||||
hitCap = true;
|
||||
} else if (RelationCounter < evaluationCap * -1) {
|
||||
addSMXToMapReturn(strmapreturn, SMX);
|
||||
hitCap = true;
|
||||
}
|
||||
}
|
||||
//System.out.println("similarityMatrixes size: " + similarityMatrixes.size() + "\niterator: " + iterator);
|
||||
iterator++;
|
||||
}
|
||||
System.out.println("similarityMatrixes size: " + similarityMatrixes.size() + "\niterator: " + iterator);
|
||||
iterator++;
|
||||
}
|
||||
return strmapreturn;
|
||||
}
|
||||
|
||||
private ConcurrentMap<Integer, String> addSMXToMapReturn(ConcurrentMap<Integer, String> strmapreturn, SimilarityMatrix SMX) {
|
||||
//System.out.println("RelationCounter cap: " + RelationCounter);
|
||||
boolean related = RelationCounter > 0;
|
||||
boolean related = RelationCounter > 500;
|
||||
if (related) {
|
||||
strmapreturn.put(strmapreturn.size(), SMX.getPrimaryString());
|
||||
String transmittedStr = SMX.getSecondaryString();
|
||||
@ -322,29 +324,29 @@ public class Datahandler {
|
||||
private List<SimilarityMatrix> StrComparringNoSentenceRelationMap(
|
||||
ConcurrentMap<Integer, String> strCacheLocal, String str, ConcurrentMap<String, Annotation> localJMWEMap,
|
||||
ConcurrentMap<String, Annotation> localPipelineAnnotation, ConcurrentMap<String, Annotation> localPipelineSentimentAnnotation,
|
||||
ConcurrentMap<String, CoreDocument> localCoreDocumentMap) {
|
||||
ConcurrentMap<String, CoreDocument> localCoreDocumentMap, CompletionService<SimilarityMatrix> ecs, int index) {
|
||||
int prefix_size = 125;
|
||||
SentimentValueCache sentimentCacheStr = sentimentCachingMap.getOrDefault(str, null);
|
||||
List<SimilarityMatrix> smxReturnList = new ArrayList();
|
||||
List<String> randomIterationComparision = new ArrayList();
|
||||
int iteratecap = strCacheLocal.size() > 150 ? strCacheLocal.size() - 150 : strCacheLocal.size();
|
||||
int iteratecap = strCacheLocal.size() > prefix_size ? strCacheLocal.size() - prefix_size : strCacheLocal.size();
|
||||
int iterator = ThreadLocalRandom.current().nextInt(0, iteratecap);
|
||||
int iterated = 0;
|
||||
for (String str1 : strCacheLocal.values()) {
|
||||
if (iterated >= iterator && iterated < iterator + 150) {
|
||||
if (iterated >= iterator && iterated < iterator + prefix_size) {
|
||||
randomIterationComparision.add(str1);
|
||||
}
|
||||
if (iterated > iterator + 150) {
|
||||
if (iterated > iterator + prefix_size) {
|
||||
break;
|
||||
}
|
||||
iterated++;
|
||||
}
|
||||
List<Future<SimilarityMatrix>> futureSMX = new ArrayList(randomIterationComparision.size() + 1);
|
||||
for (String str1 : randomIterationComparision) {
|
||||
if (!str.equals(str1)) {
|
||||
SimilarityMatrix SMXInit = new SimilarityMatrix(str, str1);
|
||||
SentimentValueCache sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null);
|
||||
Callable<SimilarityMatrix> worker;
|
||||
if (stringCache.size() < 150) {
|
||||
if (stringCache.size() < prefix_size) {
|
||||
worker = new SentimentAnalyzerTest(str, str1, SMXInit,
|
||||
localJMWEMap.get(str), localJMWEMap.get(str1), localPipelineAnnotation.get(str),
|
||||
localPipelineAnnotation.get(str1), localPipelineSentimentAnnotation.get(str),
|
||||
@ -355,21 +357,20 @@ public class Datahandler {
|
||||
pipelineAnnotationCache.get(str1), localPipelineSentimentAnnotation.get(str),
|
||||
pipelineSentimentAnnotationCache.get(str1), localCoreDocumentMap.get(str), coreDocumentAnnotationCache.get(str1), sentimentCacheStr, sentimentCacheStr1);
|
||||
}
|
||||
ExecutorService smxService = Executors.newSingleThreadExecutor();
|
||||
Future<SimilarityMatrix> future = smxService.submit(worker);
|
||||
futureSMX.add(future);
|
||||
ecs.submit(worker);
|
||||
index++;
|
||||
}
|
||||
}
|
||||
for (Future<SimilarityMatrix> future : futureSMX) {
|
||||
for (int i = 0; i < index; i++) {
|
||||
try {
|
||||
SimilarityMatrix SMX = future.get();
|
||||
if (SMX != null) {
|
||||
smxReturnList.add(SMX);
|
||||
}
|
||||
Future<SimilarityMatrix> take = ecs.take();
|
||||
SimilarityMatrix smx = take.get();
|
||||
smxReturnList.add(smx);
|
||||
} catch (InterruptedException | ExecutionException ex) {
|
||||
//System.out.println("failed future get");
|
||||
//
|
||||
}
|
||||
}
|
||||
index = 0;
|
||||
return smxReturnList;
|
||||
}
|
||||
|
||||
@ -379,17 +380,17 @@ public class Datahandler {
|
||||
ConcurrentMap<String, CoreDocument> localCoreDocumentMap) {
|
||||
ConcurrentMap<Integer, String> strmapreturn = new MapMaker().concurrencyLevel(6).makeMap();
|
||||
List<SimilarityMatrix> strSenseRelationMap = new ArrayList();
|
||||
int iterator = 0;
|
||||
for (String str : strmap.values()) {
|
||||
ExecutorService threadPool = Executors.newCachedThreadPool();
|
||||
CompletionService<SimilarityMatrix> ecs = new ExecutorCompletionService<>(threadPool);
|
||||
int index = 0;
|
||||
strmap.values().forEach(str -> {
|
||||
List<SimilarityMatrix> localNoSentenceRelationList = StrComparringNoSentenceRelationMap(strCacheLocal, str,
|
||||
localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap);
|
||||
localJMWEMap, localPipelineAnnotation, localPipelineSentimentAnnotation, localCoreDocumentMap, ecs, index);
|
||||
for (SimilarityMatrix SMX : localNoSentenceRelationList) {
|
||||
strSenseRelationMap.add(SMX);
|
||||
System.out.println("added SMX: " + SMX.getPrimaryString() + "\n" + SMX.getSecondaryString() + "\nstrSenseRelationMap.size(): "
|
||||
+ strSenseRelationMap.size() + "\nstrmap size: " + strmap.size() + "\niterator: " + iterator + "\n\n");
|
||||
}
|
||||
iterator++;
|
||||
}
|
||||
});
|
||||
threadPool.shutdown();
|
||||
Collections.sort(strSenseRelationMap, (e1, e2) -> e1.getPrimaryString().compareTo(e2.getPrimaryString()));
|
||||
strmapreturn = futuresReturnOverallEvaluation(strSenseRelationMap, strmapreturn);
|
||||
return strmapreturn;
|
||||
@ -410,10 +411,10 @@ public class Datahandler {
|
||||
str = cutContent(str, hlStatsMsg);
|
||||
str = filterContent(str);
|
||||
str = removeSlacks(str);
|
||||
System.out.println("finished removeSlacks \n" + str.size() + "\n");
|
||||
//System.out.println("finished removeSlacks \n" + str.size() + "\n");
|
||||
str = removeNonSensicalStrings(str);
|
||||
str = annotationCacheUpdate(str);
|
||||
System.out.println("annotationCacheUpdate str size POST: " + str.size() + "\n");
|
||||
//System.out.println("annotationCacheUpdate str size POST: " + str.size() + "\n");
|
||||
ConcurrentMap<Integer, String> strf = str;
|
||||
if (!stringCache.isEmpty()) {
|
||||
new Thread(() -> {
|
||||
@ -423,7 +424,7 @@ public class Datahandler {
|
||||
Logger.getLogger(Datahandler.class
|
||||
.getName()).log(Level.SEVERE, null, ex);
|
||||
}
|
||||
MessageResponseHandler.setStr(new MapMaker().concurrencyLevel(2).makeMap());
|
||||
MessageResponseHandler.setStr(new MapMaker().concurrencyLevel(6).makeMap());
|
||||
}).start();
|
||||
} else {
|
||||
try {
|
||||
@ -450,60 +451,57 @@ public class Datahandler {
|
||||
return str;
|
||||
}
|
||||
|
||||
private String getResponseFutures(String strF, ConcurrentMap<String, Integer> mostRecentMsgMap) {
|
||||
ConcurrentMap<Integer, String> strCache = stringCache;
|
||||
double preRelationUserCounters = -6500.0;
|
||||
private String getResponseFutures(String strF) {
|
||||
List<String> values_copy = new ArrayList<String>(stringCache.values());
|
||||
int maxsize = values_copy.size() > 500 ? 500 : values_copy.size();
|
||||
Collections.shuffle(values_copy);
|
||||
List<String> strCache = values_copy.subList(0, maxsize);
|
||||
double preRelationUserCounters = -150000.0;
|
||||
//WHY THE FUCK CANT YOU JUST TRANSFER A SimilarityMatrix OBJECT LIST LIKE ANY OTHER NORMAL COLLECTION, WHY DOES IT HAVE TO BE A FUCKING STRING LIST
|
||||
List<String> concurrentRelations = new ArrayList();
|
||||
for (String str1 : strCache.values()) {
|
||||
List<Callable<SimilarityMatrix>> call_able_list = new ArrayList();
|
||||
for (String str1 : strCache) {
|
||||
if (!strF.equals(str1)) {
|
||||
SentimentValueCache sentimentCacheStr1 = sentimentCachingMap.getOrDefault(str1, null);
|
||||
Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(strF, str1, new SimilarityMatrix(strF, str1),
|
||||
strAnnoJMWE, jmweAnnotationCache.get(str1), strAnno,
|
||||
pipelineAnnotationCache.get(str1), strAnnoSentiment,
|
||||
pipelineSentimentAnnotationCache.get(str1), coreDoc, coreDocumentAnnotationCache.get(str1), null, sentimentCacheStr1);
|
||||
try {
|
||||
SimilarityMatrix getSMX = worker.call();
|
||||
if (getSMX != null) {
|
||||
Integer repeatedSentences = mostRecentMsgMap.getOrDefault(getSMX.getSecondaryString(), null);
|
||||
Double scoreRelationLastUserMsg = getSMX.getDistance();
|
||||
/*
|
||||
double oldRelation = overAllOldScoreRelations(getSMX.getSecondaryString());
|
||||
if (oldRelation < 0 && conversationUserMatchMap.size() > 1) {
|
||||
conversationUserMatchMap.clear();
|
||||
} else if (oldRelation > 0) {
|
||||
scoreRelationLastUserMsg += oldRelation;
|
||||
}
|
||||
*/
|
||||
if (scoreRelationLastUserMsg > preRelationUserCounters) {
|
||||
preRelationUserCounters = scoreRelationLastUserMsg;
|
||||
concurrentRelations.add(getSMX.getSecondaryString());
|
||||
System.out.println("secondary: " + getSMX.getSecondaryString() + "\nDistance: " + getSMX.getDistance() + "\n");
|
||||
System.out.println("SUCESS concurrentRelationsMap size: " + concurrentRelations.size() + "\n");
|
||||
}
|
||||
call_able_list.add(worker);
|
||||
}
|
||||
}
|
||||
for (Callable<SimilarityMatrix> callSMX : call_able_list) {
|
||||
try {
|
||||
SimilarityMatrix getSMX = callSMX.call();
|
||||
if (getSMX != null) {
|
||||
Double scoreRelationLastUserMsg = getSMX.getDistance();
|
||||
if (scoreRelationLastUserMsg > preRelationUserCounters) {
|
||||
preRelationUserCounters = scoreRelationLastUserMsg;
|
||||
concurrentRelations.add(getSMX.getSecondaryString());
|
||||
//System.out.println("secondary: " + getSMX.getSecondaryString() + "\nDistance: " + getSMX.getDistance() + "\n");
|
||||
//System.out.println("SUCESS concurrentRelationsMap size: " + concurrentRelations.size() + "\n");
|
||||
}
|
||||
} catch (InterruptedException | ExecutionException | TimeoutException ex) {
|
||||
System.out.println("ex getResponsemsg: " + ex.getMessage() + "\n");
|
||||
} catch (Exception ex) {
|
||||
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
|
||||
}
|
||||
}
|
||||
StringBuilder SB = new StringBuilder();
|
||||
double randomLenghtPermit = strF.length() * (Math.random() * 2.0);
|
||||
double randomLenghtPermit = strF.length() * (Math.random() * Math.random() * Math.random());
|
||||
Collections.reverse(concurrentRelations);
|
||||
if (concurrentRelations.isEmpty()) {
|
||||
return "failure, preventing stuckness";
|
||||
}
|
||||
String firstRelation = concurrentRelations.get(0);
|
||||
for (String secondaryRelation : concurrentRelations) {
|
||||
if (SB.toString().length() > randomLenghtPermit && !SB.toString().isEmpty()) {
|
||||
break;
|
||||
}
|
||||
System.out.println("relation secondary: " + secondaryRelation + "\n");
|
||||
boolean append = appendToString(firstRelation, secondaryRelation);
|
||||
if (append) {
|
||||
SB.append(secondaryRelation).append(" ");
|
||||
}
|
||||
}
|
||||
System.out.println("Reached end SB: " + SB.toString() + "\n");
|
||||
return SB.toString();
|
||||
}
|
||||
|
||||
@ -518,23 +516,10 @@ public class Datahandler {
|
||||
return false;
|
||||
}
|
||||
|
||||
private double overAllOldScoreRelations(String strF) {
|
||||
if (!conversationUserMatchMap.keySet().contains(strF)) {
|
||||
conversationUserMatchMap.put(strF, conversationUserMatchMap.size());
|
||||
}
|
||||
double sentenceContinuity = -500.0;
|
||||
for (String conversationUserStr : conversationUserMatchMap.keySet()) {
|
||||
if (!strF.equals(conversationUserStr)) {
|
||||
sentenceContinuity += getScoreRelationStrF(strF, conversationUserStr);
|
||||
}
|
||||
}
|
||||
return sentenceContinuity;
|
||||
}
|
||||
|
||||
public synchronized String getResponseMsg(String str, ConcurrentMap<String, Integer> mostRecentMsgMap) throws CustomError {
|
||||
public String getResponseMsg(String str) throws CustomError {
|
||||
String strF = trimString(str);
|
||||
getSingularAnnotation(strF);
|
||||
return getResponseFutures(strF, mostRecentMsgMap);
|
||||
return getResponseFutures(strF);
|
||||
}
|
||||
|
||||
public void getSingularAnnotation(String str) {
|
||||
@ -626,7 +611,7 @@ public class Datahandler {
|
||||
|
||||
public static ConcurrentMap<Integer, String> cutContent(ConcurrentMap<Integer, String> str, boolean hlStatsMsg) {
|
||||
ConcurrentMap<Integer, String> returnlist = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
str.values().parallelStream().forEach(str1 -> {
|
||||
str.values().forEach(str1 -> {
|
||||
int iend = str1.indexOf("content: ");
|
||||
if (iend != -1) {
|
||||
String trs = str1.substring(iend + 9);
|
||||
@ -640,7 +625,7 @@ public class Datahandler {
|
||||
|
||||
public static ConcurrentMap<Integer, String> filterContent(ConcurrentMap<Integer, String> str) {
|
||||
ConcurrentMap<Integer, String> strlistreturn = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
str.values().parallelStream().forEach(str1 -> {
|
||||
str.values().forEach(str1 -> {
|
||||
if (!str1.isEmpty() && str1.length() > 3) {
|
||||
str1 = str1.trim();
|
||||
if (str1.contains("PM*")) {
|
||||
@ -751,7 +736,7 @@ public class Datahandler {
|
||||
return str;
|
||||
}
|
||||
Collection<String> values = stringCache.values();
|
||||
str.values().parallelStream().forEach(str1 -> {
|
||||
str.values().forEach(str1 -> {
|
||||
boolean tooclosematch = false;
|
||||
for (String strVals : values) {
|
||||
LevenshteinDistance leven = new LevenshteinDistance(strVals, str1);
|
||||
@ -778,8 +763,7 @@ public class Datahandler {
|
||||
ConcurrentMap<String, Annotation> Annotationspipeline = new MapMaker().concurrencyLevel(4).makeMap();
|
||||
ConcurrentMap<String, Annotation> AnnotationspipelineSentiment = new MapMaker().concurrencyLevel(4).makeMap();
|
||||
ConcurrentMap<String, CoreDocument> coreDocumentpipelineMap = getMultipleCoreDocumentsWaySuggestion(strmap.values(), pipeline);
|
||||
System.out.println("finished getMultipleCoreDocumentsWaySuggestion");
|
||||
strmap.values().parallelStream().forEach(str -> {
|
||||
strmap.values().forEach(str -> {
|
||||
Annotation strAnno1 = new Annotation(str);
|
||||
Annotationspipeline.put(str, strAnno1);
|
||||
Annotation strAnno2 = new Annotation(str);
|
||||
@ -791,15 +775,11 @@ public class Datahandler {
|
||||
Annotationspipeline.entrySet().forEach(pipelineEntry -> {
|
||||
if (pipelineEntry != null) {
|
||||
pipelineAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue());
|
||||
} else {
|
||||
System.out.println("failed pipeline cache \n");
|
||||
}
|
||||
});
|
||||
AnnotationspipelineSentiment.entrySet().forEach(pipelineEntry -> {
|
||||
if (pipelineEntry != null) {
|
||||
pipelineSentimentAnnotationCache.put(pipelineEntry.getKey(), pipelineEntry.getValue());
|
||||
} else {
|
||||
System.out.println("failed sentiment cache \n");
|
||||
}
|
||||
});
|
||||
coreDocumentpipelineMap.entrySet().forEach(coreDocumentEntry -> {
|
||||
@ -808,6 +788,10 @@ public class Datahandler {
|
||||
return strmap;
|
||||
}
|
||||
|
||||
public int getMessageOverHead() {
|
||||
return stringCache.values().size() - (stringCache.values().size() / 10);
|
||||
}
|
||||
|
||||
private static class AnnotationCollector<T> implements Consumer<T> {
|
||||
|
||||
private static int i = 0;
|
||||
|
ArtificialAutism/src/main/java/FunctionLayer/DoStuff.java (new file, 85 lines added)
@ -0,0 +1,85 @@
|
||||
/*
|
||||
* To change this license header, choose License Headers in Project Properties.
|
||||
* To change this template file, choose Tools | Templates
|
||||
* and open the template in the editor.
|
||||
*/
|
||||
package FunctionLayer;
|
||||
|
||||
import PresentationLayer.DiscordHandler;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
import org.javacord.api.DiscordApi;
|
||||
import org.javacord.api.entity.user.User;
|
||||
import org.javacord.api.event.message.MessageCreateEvent;
|
||||
|
||||
/**
|
||||
*
|
||||
* @author install1
|
||||
*/
|
||||
public class DoStuff {
|
||||
|
||||
public volatile boolean occupied = false;
|
||||
|
||||
public boolean isOccupied() {
|
||||
return occupied;
|
||||
}
|
||||
|
||||
public DoStuff() {
|
||||
}
|
||||
|
||||
public void doStuff(MessageCreateEvent event, DiscordApi api) {
|
||||
if (!event.getMessage().getAuthor().isYourself()) {
|
||||
occupied = true;
|
||||
String strtest = event.getServerTextChannel().get().getCategory().toString();
|
||||
strtest = strtest.substring(9, strtest.length() - 1);
|
||||
boolean channelpermissionsDenied = false;
|
||||
switch (strtest) {
|
||||
case "Public Area": {
|
||||
break;
|
||||
}
|
||||
case "Information Area": {
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
channelpermissionsDenied = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!channelpermissionsDenied) {
|
||||
List<User> userlist = event.getMessage().getMentionedUsers();
|
||||
String strresult = event.getMessage().toString();
|
||||
if (userlist != null) {
|
||||
for (User user : userlist) {
|
||||
strresult = strresult.replace(user.getIdAsString(), "");
|
||||
}
|
||||
}
|
||||
MessageResponseHandler.getMessage(strresult);
|
||||
}
|
||||
if (event.getMessage().getMentionedUsers().contains(api.getYourself())
|
||||
|| event.getServerTextChannel().get().toString().contains("general-autism")) {
|
||||
try {
|
||||
String ResponseStr;
|
||||
String person = event.getMessageAuthor().getName();
|
||||
ResponseStr = MessageResponseHandler.selectReponseMessage(event.getMessage().toString(), person);
|
||||
if (!ResponseStr.isEmpty()) {
|
||||
System.out.print("\nResponseStr3: " + ResponseStr + "\n");
|
||||
event.getChannel().sendMessage(ResponseStr);
|
||||
}
|
||||
} catch (CustomError ex) {
|
||||
Logger.getLogger(DoStuff.class.getName()).log(Level.SEVERE, null, ex);
|
||||
}
|
||||
|
||||
}
|
||||
new Thread(() -> {
|
||||
try {
|
||||
Datahandler.instance.checkIfUpdateStrings(false);
|
||||
} catch (CustomError ex) {
|
||||
Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);
|
||||
}
|
||||
}).start();
|
||||
occupied = false;
|
||||
}
|
||||
}
|
||||
}
|
@ -43,9 +43,8 @@ public class MessageResponseHandler {
|
||||
}
|
||||
}
|
||||
|
||||
public synchronized static String selectReponseMessage(String toString, ConcurrentMap<String, Integer> mostRecentMsgMap,
|
||||
String personName) throws CustomError {
|
||||
ConcurrentMap<Integer, String> str1 = new MapMaker().concurrencyLevel(2).makeMap();
|
||||
public static String selectReponseMessage(String toString, String personName) throws CustomError {
|
||||
ConcurrentMap<Integer, String> str1 = new MapMaker().concurrencyLevel(6).makeMap();
|
||||
str1.put(str1.size() + 1, toString);
|
||||
str1 = Datahandler.cutContent(str1, false);
|
||||
String strreturn = "";
|
||||
@ -54,7 +53,7 @@ public class MessageResponseHandler {
|
||||
strreturn = str;
|
||||
}
|
||||
}
|
||||
String getResponseMsg = Datahandler.instance.getResponseMsg(strreturn, mostRecentMsgMap);
|
||||
String getResponseMsg = Datahandler.instance.getResponseMsg(strreturn);
|
||||
getResponseMsg = checkPersonPresentInSentence(personName, getResponseMsg, strreturn);
|
||||
return getResponseMsg;
|
||||
}
|
||||
@ -62,37 +61,36 @@ public class MessageResponseHandler {
|
||||
private static String checkPersonPresentInSentence(String personName, String responseMsg, String userLastMessage) {
|
||||
//check if userlastmsg contains person as refference
|
||||
//check if first person is author or their person of mention
|
||||
String strreturn = responseMsg;
|
||||
CoreDocument pipelineCoreDcoument = new CoreDocument(responseMsg);
|
||||
CoreDocument pipelineCoreDcoumentLastMsg = new CoreDocument(userLastMessage);
|
||||
Datahandler.getPipeline().annotate(pipelineCoreDcoument);
|
||||
Datahandler.getPipeline().annotate(pipelineCoreDcoumentLastMsg);
|
||||
String regex = "(.*?\\d){10,}";
|
||||
try {
|
||||
String strreturn = responseMsg;
|
||||
CoreDocument pipelineCoreDcoument = new CoreDocument(responseMsg);
|
||||
CoreDocument pipelineCoreDcoumentLastMsg = new CoreDocument(userLastMessage);
|
||||
Datahandler.getPipeline().annotate(pipelineCoreDcoument);
|
||||
Datahandler.getPipeline().annotate(pipelineCoreDcoumentLastMsg);
|
||||
String regex = "(.*?\\d){10,}";
|
||||
for (CoreEntityMention em : pipelineCoreDcoument.entityMentions()) {
|
||||
String entityType = em.entityType();
|
||||
if (entityType.equals("PERSON")) {
|
||||
try {
|
||||
String str = strreturn;
|
||||
String emText = em.text();
|
||||
Pattern pattern = Pattern.compile(regex);
|
||||
Matcher matcher = pattern.matcher(personName);
|
||||
boolean isMatched = matcher.matches();
|
||||
if (!emText.equals(personName) && !isMatched) {
|
||||
for (CoreEntityMention emLastMsg : pipelineCoreDcoumentLastMsg.entityMentions()) {
|
||||
if (!emText.equals(emLastMsg.text())) {
|
||||
str = strreturn.replaceFirst(emText, emLastMsg.text());
|
||||
}
|
||||
String str = strreturn;
|
||||
String emText = em.text();
|
||||
Pattern pattern = Pattern.compile(regex);
|
||||
Matcher matcher = pattern.matcher(personName);
|
||||
boolean isMatched = matcher.matches();
|
||||
if (!emText.equals(personName) && !isMatched) {
|
||||
for (CoreEntityMention emLastMsg : pipelineCoreDcoumentLastMsg.entityMentions()) {
|
||||
if (!emText.equals(emLastMsg.text())) {
|
||||
str = strreturn.replaceFirst(emText, emLastMsg.text());
|
||||
}
|
||||
try {
|
||||
str = str.replace(str.substring(str.lastIndexOf(emText), str.length()), personName);
|
||||
} catch (Exception e) {
|
||||
System.out.println("failed replacing: " + e.getLocalizedMessage() + "\n");
|
||||
}
|
||||
return str;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
System.out.println("failed person replacement: " + e.getLocalizedMessage() + "\n");
|
||||
try {
|
||||
//System.out.println("personName: " + personName + " str: " + str);
|
||||
str = str.replace(str.substring(str.lastIndexOf(emText), str.length()), personName);
|
||||
//System.out.println("str after: " + str);
|
||||
} catch (Exception e) {
|
||||
//System.out.println("failed replacing: " + e.getLocalizedMessage() + "\n");
|
||||
str += " " + personName;
|
||||
}
|
||||
return str;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -101,4 +99,9 @@ public class MessageResponseHandler {
|
||||
}
|
||||
return responseMsg;
|
||||
}
|
||||
|
||||
public static int getOverHead() {
|
||||
int getResponseMsgOverHead = Datahandler.instance.getMessageOverHead();
|
||||
return getResponseMsgOverHead;
|
||||
}
|
||||
}
|
||||
|
@ -1250,136 +1250,257 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
|
||||
SentimentValueCache cacheSentimentLocal2 = null;
|
||||
int counter1;
|
||||
int counter2;
|
||||
if (cacheSentiment1 == null) {
|
||||
cacheSentimentLocal1 = initializeCacheSetup(str, cacheSentimentLocal1);
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
cacheSentimentLocal2 = initializeCacheSetup(str1, cacheSentimentLocal2);
|
||||
}
|
||||
counter1 = cacheSentiment1 == null ? cacheSentimentLocal1.getCounter() : cacheSentiment1.getCounter();
|
||||
counter2 = cacheSentiment2 == null ? cacheSentimentLocal2.getCounter() : cacheSentiment2.getCounter();
|
||||
final int overValue = (counter1 >= counter2 ? counter1 - counter2 : counter2 - counter1) * 32;
|
||||
score -= overValue;
|
||||
if (cacheSentiment1 == null) {
|
||||
ConcurrentMap<Integer, String> retrieveTGWListIndex = retrieveTGWListIndex(cacheSentimentLocal1.getTaggedwordlist());
|
||||
for (String str : retrieveTGWListIndex.values()) {
|
||||
cacheSentimentLocal1.addTgwlistIndex(str);
|
||||
try {
|
||||
if (cacheSentiment1 == null) {
|
||||
cacheSentimentLocal1 = initializeCacheSetup(str, cacheSentimentLocal1);
|
||||
}
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
ConcurrentMap<Integer, String> retrieveTGWListIndex = retrieveTGWListIndex(cacheSentimentLocal2.getTaggedwordlist());
|
||||
for (String str : retrieveTGWListIndex.values()) {
|
||||
cacheSentimentLocal2.addTgwlistIndex(str);
|
||||
if (cacheSentiment2 == null) {
|
||||
cacheSentimentLocal2 = initializeCacheSetup(str1, cacheSentimentLocal2);
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
score = tgwListScoreIncrementer(score, cacheSentiment1 == null
|
||||
? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null ? cacheSentimentLocal2 : cacheSentiment2);
|
||||
if (cacheSentiment1 == null) {
|
||||
cacheSentimentLocal1 = GrammaticStructureSetup(cacheSentimentLocal1, pipelineAnnotation1);
|
||||
try {
|
||||
counter1 = cacheSentiment1 == null ? cacheSentimentLocal1.getCounter() : cacheSentiment1.getCounter();
|
||||
counter2 = cacheSentiment2 == null ? cacheSentimentLocal2.getCounter() : cacheSentiment2.getCounter();
|
||||
|
||||
final int overValue = (counter1 >= counter2 ? counter1 - counter2 : counter2 - counter1) * 32;
|
||||
score -= overValue;
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
cacheSentimentLocal2 = GrammaticStructureSetup(cacheSentimentLocal2, pipelineAnnotation2);
|
||||
try {
|
||||
if (cacheSentiment1 == null) {
|
||||
ConcurrentMap<Integer, String> retrieveTGWListIndex = retrieveTGWListIndex(cacheSentimentLocal1.getTaggedwordlist());
|
||||
for (String str : retrieveTGWListIndex.values()) {
|
||||
cacheSentimentLocal1.addTgwlistIndex(str);
|
||||
}
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
ConcurrentMap<Integer, String> retrieveTGWListIndex = retrieveTGWListIndex(cacheSentimentLocal2.getTaggedwordlist());
|
||||
for (String str : retrieveTGWListIndex.values()) {
|
||||
cacheSentimentLocal2.addTgwlistIndex(str);
|
||||
}
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
ConcurrentMap<Integer, Tree> sentenceConstituencyParseList2 = cacheSentiment2 == null
|
||||
? cacheSentimentLocal2.getSentenceConstituencyParseList() : cacheSentiment2.getSentenceConstituencyParseList();
|
||||
ConcurrentMap<Integer, Tree> sentenceConstituencyParseList1 = cacheSentiment1 == null
|
||||
? cacheSentimentLocal1.getSentenceConstituencyParseList() : cacheSentiment1.getSentenceConstituencyParseList();
|
||||
score = iterateTrees(sentenceConstituencyParseList2, sentenceConstituencyParseList1, score);
|
||||
Collection<TypedDependency> allTypedDependencies2 = cacheSentiment2 == null ? cacheSentimentLocal2.getAllTypedDependencies()
|
||||
: cacheSentiment2.getAllTypedDependencies();
|
||||
Collection<TypedDependency> allTypedDependencies1 = cacheSentiment1 == null ? cacheSentimentLocal1.getAllTypedDependencies()
|
||||
: cacheSentiment1.getAllTypedDependencies();
|
||||
ConcurrentMap<Integer, GrammaticalStructure> grammaticalMap1 = cacheSentiment1 == null ? cacheSentimentLocal1.getGs() : cacheSentiment1.getGs();
|
||||
ConcurrentMap<Integer, GrammaticalStructure> grammaticalMap2 = cacheSentiment2 == null ? cacheSentimentLocal2.getGs() : cacheSentiment2.getGs();
|
||||
score = typeDependenciesGrammaticalRelation(allTypedDependencies1, allTypedDependencies2, score, grammaticalMap1, grammaticalMap2,
|
||||
sentenceConstituencyParseList1, sentenceConstituencyParseList2);
|
||||
if (cacheSentiment1 == null) {
|
||||
cacheSentimentLocal1 = sentimentCoreAnnotationSetup(pipelineAnnotation1Sentiment, cacheSentimentLocal1);
|
||||
try {
|
||||
score = tgwListScoreIncrementer(score, cacheSentiment1 == null
|
||||
? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null ? cacheSentimentLocal2 : cacheSentiment2);
|
||||
if (cacheSentiment1 == null) {
|
||||
cacheSentimentLocal1 = GrammaticStructureSetup(cacheSentimentLocal1, pipelineAnnotation1);
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
cacheSentimentLocal2 = GrammaticStructureSetup(cacheSentimentLocal2, pipelineAnnotation2);
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
cacheSentimentLocal2 = sentimentCoreAnnotationSetup(pipelineAnnotation2Sentiment, cacheSentimentLocal2);
|
||||
ConcurrentMap<Integer, Tree> sentenceConstituencyParseList1 = null;
|
||||
ConcurrentMap<Integer, Tree> sentenceConstituencyParseList2 = null;
|
||||
try {
|
||||
sentenceConstituencyParseList2 = cacheSentiment2 == null
|
||||
? cacheSentimentLocal2.getSentenceConstituencyParseList() : cacheSentiment2.getSentenceConstituencyParseList();
|
||||
sentenceConstituencyParseList1 = cacheSentiment1 == null
|
||||
? cacheSentimentLocal1.getSentenceConstituencyParseList() : cacheSentiment1.getSentenceConstituencyParseList();
|
||||
score = iterateTrees(sentenceConstituencyParseList2, sentenceConstituencyParseList1, score);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist1 = cacheSentiment1 == null
|
||||
? cacheSentimentLocal1.getSimpleSMXlist() : cacheSentiment1.getSimpleSMXlist();
|
||||
final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist2 = cacheSentiment2 == null
|
||||
? cacheSentimentLocal2.getSimpleSMXlist() : cacheSentiment2.getSimpleSMXlist();
|
||||
final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector1 = cacheSentiment1 == null
|
||||
? cacheSentimentLocal1.getSimpleSMXlistVector() : cacheSentiment1.getSimpleSMXlistVector();
|
||||
final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector2 = cacheSentiment2 == null
|
||||
? cacheSentimentLocal2.getSimpleSMXlistVector() : cacheSentiment2.getSimpleSMXlistVector();
|
||||
score = simpleRNNMatrixCalculations(score, simpleSMXlist1, simpleSMXlist2);
|
||||
score = simpleRNNMaxtrixVectors(score, simpleSMXlistVector1, simpleSMXlistVector2);
|
||||
int sentiment1 = cacheSentiment1 == null ? cacheSentimentLocal1.getRnnPrediectClassMap().size() : cacheSentiment1.getRnnPrediectClassMap().size();
|
||||
int sentiment2 = cacheSentiment2 == null ? cacheSentimentLocal2.getRnnPrediectClassMap().size() : cacheSentiment2.getRnnPrediectClassMap().size();
|
||||
score -= (sentiment1 > sentiment2 ? sentiment1 - sentiment2 : sentiment2 - sentiment1) * 500;
|
||||
Map.Entry<Double, Map.Entry<SentimentValueCache, SentimentValueCache>> classifyRawEvaluationEntry = classifyRawEvaluation(score, cacheSentimentLocal1,
|
||||
cacheSentimentLocal2);
|
||||
score = classifyRawEvaluationEntry.getKey();
|
||||
if (cacheSentiment1 == null) {
|
||||
cacheSentimentLocal1 = classifyRawEvaluationEntry.getValue().getKey();
|
||||
try {
|
||||
Collection<TypedDependency> allTypedDependencies2 = cacheSentiment2 == null ? cacheSentimentLocal2.getAllTypedDependencies()
|
||||
: cacheSentiment2.getAllTypedDependencies();
|
||||
Collection<TypedDependency> allTypedDependencies1 = cacheSentiment1 == null ? cacheSentimentLocal1.getAllTypedDependencies()
|
||||
: cacheSentiment1.getAllTypedDependencies();
|
||||
|
||||
ConcurrentMap<Integer, GrammaticalStructure> grammaticalMap1 = cacheSentiment1 == null ? cacheSentimentLocal1.getGs() : cacheSentiment1.getGs();
|
||||
ConcurrentMap<Integer, GrammaticalStructure> grammaticalMap2 = cacheSentiment2 == null ? cacheSentimentLocal2.getGs() : cacheSentiment2.getGs();
|
||||
score = typeDependenciesGrammaticalRelation(allTypedDependencies1, allTypedDependencies2, score, grammaticalMap1, grammaticalMap2,
|
||||
sentenceConstituencyParseList1, sentenceConstituencyParseList2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
cacheSentimentLocal2 = classifyRawEvaluationEntry.getValue().getValue();
|
||||
try {
|
||||
if (cacheSentiment1 == null) {
|
||||
cacheSentimentLocal1 = sentimentCoreAnnotationSetup(pipelineAnnotation1Sentiment, cacheSentimentLocal1);
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
cacheSentimentLocal2 = sentimentCoreAnnotationSetup(pipelineAnnotation2Sentiment, cacheSentimentLocal2);
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
if (cacheSentiment1 == null) {
|
||||
cacheSentimentLocal1 = setupMainSentimentandLongestVal(pipelineAnnotation1Sentiment, cacheSentimentLocal1);
|
||||
try {
|
||||
final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist1 = cacheSentiment1 == null
|
||||
? cacheSentimentLocal1.getSimpleSMXlist() : cacheSentiment1.getSimpleSMXlist();
|
||||
final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist2 = cacheSentiment2 == null
|
||||
? cacheSentimentLocal2.getSimpleSMXlist() : cacheSentiment2.getSimpleSMXlist();
|
||||
final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector1 = cacheSentiment1 == null
|
||||
? cacheSentimentLocal1.getSimpleSMXlistVector() : cacheSentiment1.getSimpleSMXlistVector();
|
||||
final ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector2 = cacheSentiment2 == null
|
||||
? cacheSentimentLocal2.getSimpleSMXlistVector() : cacheSentiment2.getSimpleSMXlistVector();
|
||||
score = simpleRNNMatrixCalculations(score, simpleSMXlist1, simpleSMXlist2);
|
||||
score = simpleRNNMaxtrixVectors(score, simpleSMXlistVector1, simpleSMXlistVector2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
cacheSentimentLocal2 = setupMainSentimentandLongestVal(pipelineAnnotation2Sentiment, cacheSentimentLocal2);
|
||||
try {
|
||||
int sentiment1 = cacheSentiment1 == null ? cacheSentimentLocal1.getRnnPrediectClassMap().size() : cacheSentiment1.getRnnPrediectClassMap().size();
|
||||
int sentiment2 = cacheSentiment2 == null ? cacheSentimentLocal2.getRnnPrediectClassMap().size() : cacheSentiment2.getRnnPrediectClassMap().size();
|
||||
score -= (sentiment1 > sentiment2 ? sentiment1 - sentiment2 : sentiment2 - sentiment1) * 500;
|
||||
Map.Entry<Double, Map.Entry<SentimentValueCache, SentimentValueCache>> classifyRawEvaluationEntry = classifyRawEvaluation(score, cacheSentimentLocal1,
|
||||
cacheSentimentLocal2);
|
||||
score = classifyRawEvaluationEntry.getKey();
|
||||
if (cacheSentiment1 == null) {
|
||||
cacheSentimentLocal1 = classifyRawEvaluationEntry.getValue().getKey();
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
cacheSentimentLocal2 = classifyRawEvaluationEntry.getValue().getValue();
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
score = sentimentMatrixVariances(score, cacheSentiment1 == null ? cacheSentimentLocal1.getLongest() : cacheSentiment1.getLongest(),
|
||||
cacheSentiment2 == null ? cacheSentimentLocal2.getLongest() : cacheSentiment2.getLongest(), cacheSentiment1 == null
|
||||
? cacheSentimentLocal1.getMainSentiment() : cacheSentiment1.getMainSentiment(), cacheSentiment2 == null
|
||||
? cacheSentimentLocal2.getMainSentiment() : cacheSentiment2.getMainSentiment());
|
||||
if (cacheSentiment1 == null) {
|
||||
cacheSentimentLocal1 = jmweAnnotationSetup(jmweStrAnnotation1, cacheSentimentLocal1);
|
||||
try {
|
||||
if (cacheSentiment1 == null) {
|
||||
cacheSentimentLocal1 = setupMainSentimentandLongestVal(pipelineAnnotation1Sentiment, cacheSentimentLocal1);
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
cacheSentimentLocal2 = setupMainSentimentandLongestVal(pipelineAnnotation2Sentiment, cacheSentimentLocal2);
|
||||
}
|
||||
score = sentimentMatrixVariances(score, cacheSentiment1 == null ? cacheSentimentLocal1.getLongest() : cacheSentiment1.getLongest(),
|
||||
cacheSentiment2 == null ? cacheSentimentLocal2.getLongest() : cacheSentiment2.getLongest(), cacheSentiment1 == null
|
||||
? cacheSentimentLocal1.getMainSentiment() : cacheSentiment1.getMainSentiment(), cacheSentiment2 == null
|
||||
? cacheSentimentLocal2.getMainSentiment() : cacheSentiment2.getMainSentiment());
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
cacheSentimentLocal2 = jmweAnnotationSetup(jmweStrAnnotation2, cacheSentimentLocal2);
|
||||
try {
|
||||
if (cacheSentiment1 == null) {
|
||||
cacheSentimentLocal1 = jmweAnnotationSetup(jmweStrAnnotation1, cacheSentimentLocal1);
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
cacheSentimentLocal2 = jmweAnnotationSetup(jmweStrAnnotation2, cacheSentimentLocal2);
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
|
||||
SentimentValueCache scoringCache1 = cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1;
|
||||
SentimentValueCache scoringCache2 = cacheSentiment2 == null ? cacheSentimentLocal2 : cacheSentiment2;
|
||||
score = entryCountsRelation(score, scoringCache1, scoringCache2);
|
||||
score = entryCountsScoring(score, scoringCache1, scoringCache2);
|
||||
score = tokenEntryPosScoring(score, scoringCache1, scoringCache2);
|
||||
score = unmarkedPatternCounterScoring(score, scoringCache1, scoringCache2);
|
||||
score = markedContiniousCounterScoring(score, scoringCache1, scoringCache2);
|
||||
score = strTokensMapScoring(score, scoringCache1, scoringCache2);
|
||||
score = strTokenEntryScoring(score, scoringCache1, scoringCache2);
|
||||
score = strTokenMapTagsScoring(score, scoringCache1, scoringCache2);
|
||||
score = tokenformSizeScoring(score, scoringCache1, scoringCache2);
|
||||
score = tokenStemmingMapScoring(score, scoringCache1, scoringCache2);
|
||||
score = inflectedCounterScoring(score, scoringCache1, scoringCache2);
|
||||
score = annotatorCountScoring(score, scoringCache1, scoringCache2);
|
||||
score = tokensCounterScoring(score, scoringCache1, scoringCache2);
|
||||
LevenshteinDistance leven = new LevenshteinDistance(str, str1);
|
||||
double SentenceScoreDiff = leven.computeLevenshteinDistance();
|
||||
SentenceScoreDiff *= 15;
|
||||
score -= SentenceScoreDiff;
|
||||
if (cacheSentiment1 == null) {
|
||||
cacheSentimentLocal1 = setupNEREntitiesAndTokenTags(pipelineCoreDcoument1, cacheSentimentLocal1);
|
||||
try {
|
||||
score = entryCountsRelation(score, scoringCache1, scoringCache2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
cacheSentimentLocal2 = setupNEREntitiesAndTokenTags(pipelineCoreDcoument2, cacheSentimentLocal2);
|
||||
try {
|
||||
score = entryCountsScoring(score, scoringCache1, scoringCache2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
score = nerEntitiesAndTokenScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
|
||||
? cacheSentimentLocal2 : cacheSentiment2);
|
||||
if (cacheSentiment1 == null) {
|
||||
cacheSentimentLocal1 = setupStoWordTokensLemma(pipelineAnnotation1Sentiment, cacheSentimentLocal1);
|
||||
try {
|
||||
score = tokenEntryPosScoring(score, scoringCache1, scoringCache2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
cacheSentimentLocal2 = setupStoWordTokensLemma(pipelineAnnotation2Sentiment, cacheSentimentLocal2);
|
||||
try {
|
||||
score = unmarkedPatternCounterScoring(score, scoringCache1, scoringCache2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
try {
|
||||
score = markedContiniousCounterScoring(score, scoringCache1, scoringCache2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
try {
|
||||
score = strTokensMapScoring(score, scoringCache1, scoringCache2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
try {
|
||||
score = strTokenEntryScoring(score, scoringCache1, scoringCache2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
try {
|
||||
score = strTokenMapTagsScoring(score, scoringCache1, scoringCache2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
try {
|
||||
score = tokenformSizeScoring(score, scoringCache1, scoringCache2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
try {
|
||||
score = tokenStemmingMapScoring(score, scoringCache1, scoringCache2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
try {
|
||||
score = inflectedCounterScoring(score, scoringCache1, scoringCache2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
try {
|
||||
score = annotatorCountScoring(score, scoringCache1, scoringCache2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
try {
|
||||
score = tokensCounterScoring(score, scoringCache1, scoringCache2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
try {
|
||||
LevenshteinDistance leven = new LevenshteinDistance(str, str1);
|
||||
double SentenceScoreDiff = leven.computeLevenshteinDistance();
|
||||
SentenceScoreDiff *= 15;
|
||||
score -= SentenceScoreDiff;
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
try {
|
||||
if (cacheSentiment1 == null) {
|
||||
cacheSentimentLocal1 = setupNEREntitiesAndTokenTags(pipelineCoreDcoument1, cacheSentimentLocal1);
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
cacheSentimentLocal2 = setupNEREntitiesAndTokenTags(pipelineCoreDcoument2, cacheSentimentLocal2);
|
||||
}
|
||||
score = nerEntitiesAndTokenScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
|
||||
? cacheSentimentLocal2 : cacheSentiment2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
try {
|
||||
if (cacheSentiment1 == null) {
|
||||
cacheSentimentLocal1 = setupStoWordTokensLemma(pipelineAnnotation1Sentiment, cacheSentimentLocal1);
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
cacheSentimentLocal2 = setupStoWordTokensLemma(pipelineAnnotation2Sentiment, cacheSentimentLocal2);
|
||||
}
|
||||
score = stopWordTokenLemmaScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
|
||||
? cacheSentimentLocal2 : cacheSentiment2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
try {
|
||||
score = stopwordTokenPairCounterScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
|
||||
? cacheSentimentLocal2 : cacheSentiment2);
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
score = stopWordTokenLemmaScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
|
||||
? cacheSentimentLocal2 : cacheSentiment2);
|
||||
score = stopwordTokenPairCounterScoring(score, cacheSentiment1 == null ? cacheSentimentLocal1 : cacheSentiment1, cacheSentiment2 == null
|
||||
? cacheSentimentLocal2 : cacheSentiment2);
|
||||
smxParam.setDistance(score);
|
||||
if (cacheSentiment1 == null) {
|
||||
smxParam.setCacheValue1(cacheSentimentLocal1);
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
smxParam.setCacheValue2(cacheSentimentLocal2);
|
||||
try {
|
||||
if (cacheSentiment1 == null) {
|
||||
smxParam.setCacheValue1(cacheSentimentLocal1);
|
||||
}
|
||||
if (cacheSentiment2 == null) {
|
||||
smxParam.setCacheValue2(cacheSentimentLocal2);
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
|
||||
}
|
||||
return smxParam;
|
||||
}
|
||||
|
@ -14,20 +14,15 @@ screen -X -S (number1) quit
|
||||
*/
|
||||
package PresentationLayer;
|
||||
|
||||
import FunctionLayer.CustomError;
|
||||
import FunctionLayer.Datahandler;
|
||||
import FunctionLayer.MessageResponseHandler;
|
||||
import FunctionLayer.DoStuff;
|
||||
import FunctionLayer.PipelineJMWESingleton;
|
||||
import com.google.common.collect.MapMaker;
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
import org.javacord.api.DiscordApi;
|
||||
import org.javacord.api.DiscordApiBuilder;
|
||||
import org.javacord.api.entity.user.User;
|
||||
|
||||
/**
|
||||
*
|
||||
@ -35,8 +30,6 @@ import org.javacord.api.entity.user.User;
|
||||
*/
|
||||
public class DiscordHandler {
|
||||
|
||||
private static ConcurrentMap<String, Integer> responseTrackerMap = new MapMaker().concurrencyLevel(6).makeMap();
|
||||
|
||||
public static void main(String[] args) {
|
||||
System.setProperty("java.util.concurrent.ForkJoinPool.common.parallelism", "15");
|
||||
try {
|
||||
@ -55,62 +48,12 @@ public class DiscordHandler {
|
||||
Datahandler.instance.updateStringCache();
|
||||
String token = "NTI5NzAxNTk5NjAyMjc4NDAx.Dw0vDg.7-aMjVWdQMYPl8qVNyvTCPS5F_A";
|
||||
DiscordApi api = new DiscordApiBuilder().setToken(token).login().join();
|
||||
DoStuff dostuff = new DoStuff();
|
||||
api.addMessageCreateListener(event -> {
|
||||
if (!event.getMessage().getAuthor().isYourself()) {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
String strtest = event.getServerTextChannel().get().getCategory().toString();
|
||||
strtest = strtest.substring(9, strtest.length() - 1);
|
||||
boolean channelpermissionsDenied = false;
|
||||
switch (strtest) {
|
||||
case "Public Area": {
|
||||
break;
|
||||
}
|
||||
case "Information Area": {
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
channelpermissionsDenied = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!channelpermissionsDenied) {
|
||||
List<User> userlist = event.getMessage().getMentionedUsers();
|
||||
String strresult = event.getMessage().toString();
|
||||
if (userlist != null) {
|
||||
for (User user : userlist) {
|
||||
strresult = strresult.replace(user.getIdAsString(), "");
|
||||
}
|
||||
}
|
||||
MessageResponseHandler.getMessage(strresult);
|
||||
}
|
||||
if (event.getMessage().getMentionedUsers().contains(api.getYourself())
|
||||
|| event.getServerTextChannel().get().toString().contains("general-autism")) {
|
||||
try {
|
||||
String ResponseStr;
|
||||
String person = event.getMessageAuthor().getName();
|
||||
ResponseStr = MessageResponseHandler.selectReponseMessage(event.getMessage().toString(), responseTrackerMap, person);
|
||||
if (!ResponseStr.isEmpty()) {
|
||||
System.out.print("\nResponseStr3: " + ResponseStr + "\n");
|
||||
event.getChannel().sendMessage(ResponseStr);
|
||||
Integer responseTracker = responseTrackerMap.getOrDefault(ResponseStr, null);
|
||||
if (responseTracker == null) {
|
||||
responseTrackerMap.put(ResponseStr, 0);
|
||||
} else {
|
||||
responseTrackerMap.put(ResponseStr, responseTracker + 1);
|
||||
}
|
||||
}
|
||||
} catch (CustomError ex) {
|
||||
Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);
|
||||
}
|
||||
}
|
||||
new Thread(() -> {
|
||||
try {
|
||||
Datahandler.instance.checkIfUpdateStrings(false);
|
||||
} catch (CustomError ex) {
|
||||
Logger.getLogger(DiscordHandler.class.getName()).log(Level.SEVERE, null, ex);
|
||||
}
|
||||
}).start();
|
||||
if (!dostuff.isOccupied()) {
|
||||
dostuff.doStuff(event, api);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
}