Removed the Levenshtein Callable implementation, simplified sentence fetching from the DB, moved from the unloze DB to a small VPS DB, updateMatrixes now iterates 25 entries per pass instead of 1, updated the sentiment analyzing, reduced the future get time for results to 5 seconds; a bunch of trivial things also got changed.
This commit is contained in:
parent 296be21753
commit 9e68cbb283
@@ -20,9 +20,9 @@ public class DBCPDataSource {
     static {
         try {
             ds.setDriver(new com.mysql.cj.jdbc.Driver());
-            ds.setUrl("jdbc:mysql://151.80.230.149:3306/ArtificialAutism?useLegacyDatetimeCode=false&serverTimezone=UTC");
-            ds.setUsername("ArtificialAutism");
-            ds.setPassword("b423b54bwbfb1340438fn");
+            ds.setUrl("jdbc:mysql://104.248.40.216:3306/ArtificialAutism?useLegacyDatetimeCode=false&serverTimezone=UTC");
+            ds.setUsername("root");
+            ds.setPassword("fb345972349fnsDW234/¤)#2");
             ds.setMaxTotal(-1);
             ds.setMinIdle(5);
             ds.setMaxIdle(-1);
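Note: the new datasource writes the VPS MySQL root credentials straight into the class, and the diff itself now carries both the old and the new password in version control. A minimal sketch of keeping those values outside the source instead; the resource name db.properties and the jdbc.* keys are assumptions, not part of this commit:

// Sketch only: loads pool settings from an external properties file rather than
// hard-coding them as DBCPDataSource does above.
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import org.apache.commons.dbcp2.BasicDataSource;

public final class DataSourceConfig {

    public static BasicDataSource fromProperties() throws IOException {
        Properties props = new Properties();
        try (InputStream in = DataSourceConfig.class.getResourceAsStream("/db.properties")) {
            if (in == null) {
                throw new IOException("db.properties not found on classpath");
            }
            props.load(in);
        }
        BasicDataSource ds = new BasicDataSource();
        ds.setUrl(props.getProperty("jdbc.url"));        // hypothetical key names
        ds.setUsername(props.getProperty("jdbc.user"));
        ds.setPassword(props.getProperty("jdbc.password"));
        ds.setMinIdle(5);
        return ds;
    }
}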
@@ -140,41 +140,35 @@ public class DataMapper {
 
     public static LinkedHashMap<String, LinkedHashMap<String, Double>> getAllRelationScores() {
         int count = getSementicsDBRows();
-        int counter2 = 0;
-        int hardCapRetrieveCount = 500000;
         LinkedHashMap<String, LinkedHashMap<String, Double>> LHMSMX = new LinkedHashMap();
-        while (count > counter2) {
-            try (Connection l_cCon = DBCPDataSource.getConnection()) {
-                l_cCon.setAutoCommit(false);
-                String l_sSQL = "SELECT * FROM `WordMatrix` WHERE ID > " + counter2 + " AND ID < " + (counter2 + hardCapRetrieveCount);
-                try (PreparedStatement l_pStatement = l_cCon.prepareStatement(l_sSQL, java.sql.ResultSet.TYPE_FORWARD_ONLY,
-                        java.sql.ResultSet.CONCUR_READ_ONLY)) {
-                    l_pStatement.setFetchSize(Integer.MIN_VALUE);
-                    try (ResultSet l_rsSearch = l_pStatement.executeQuery()) {
-                        int i = 0;
-                        LinkedHashMap<String, Double> LHMLocal = new LinkedHashMap();
-                        while (l_rsSearch.next() && i < hardCapRetrieveCount) {
-                            String str1 = l_rsSearch.getString(1);
-                            String str2 = l_rsSearch.getString(2);
-                            Double score = l_rsSearch.getDouble(3);
+        try (Connection l_cCon = DBCPDataSource.getConnection()) {
+            l_cCon.setAutoCommit(false);
+            String l_sSQL = "SELECT * FROM `WordMatrix`";
+            try (PreparedStatement l_pStatement = l_cCon.prepareStatement(l_sSQL, java.sql.ResultSet.TYPE_FORWARD_ONLY,
+                    java.sql.ResultSet.CONCUR_READ_ONLY)) {
+                l_pStatement.setFetchSize(Integer.MIN_VALUE);
+                try (ResultSet l_rsSearch = l_pStatement.executeQuery()) {
+                    int i = 0;
+                    LinkedHashMap<String, Double> LHMLocal = new LinkedHashMap();
+                    while (l_rsSearch.next()) {
+                        String str1 = l_rsSearch.getString(1);
+                        String str2 = l_rsSearch.getString(2);
+                        Double score = l_rsSearch.getDouble(3);
+                        LHMLocal.put(str2, score);
+                        while (l_rsSearch.next() && str1.equals(l_rsSearch.getString(1))) {
+                            str2 = l_rsSearch.getString(2);
+                            score = l_rsSearch.getDouble(3);
                             LHMLocal.put(str2, score);
-                            while (l_rsSearch.next() && i < hardCapRetrieveCount && str1.equals(l_rsSearch.getString(1))) {
-                                str2 = l_rsSearch.getString(2);
-                                score = l_rsSearch.getDouble(3);
-                                LHMLocal.put(str2, score);
-                                i++;
-                                counter2++;
-                            }
-                            LHMSMX.put(str1, LHMLocal);
-                            System.out.println("i: " + i + "\n" + "free memory: " + Runtime.getRuntime().freeMemory() + "\ncounter2: " + counter2 + "\n");
                             i++;
-                            counter2++;
                         }
+                        LHMSMX.put(str1, LHMLocal);
+                        System.out.println("i: " + i + "\n" + "free memory: " + Runtime.getRuntime().freeMemory() + "\n");
+                        i++;
                     }
                 }
-            } catch (SQLException ex) {
-                Logger.getLogger(DataMapper.class.getName()).log(Level.SEVERE, null, ex);
             }
+        } catch (SQLException ex) {
+            Logger.getLogger(DataMapper.class.getName()).log(Level.SEVERE, null, ex);
         }
         return LHMSMX;
     }
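The simplified getAllRelationScores drops the ID-range paging loop and instead leans on MySQL Connector/J's streaming mode: a forward-only, read-only statement with a fetch size of Integer.MIN_VALUE makes the driver deliver rows one at a time instead of buffering the whole table. A standalone sketch of that pattern, assuming the same three-column WordMatrix layout and the java.sql imports used above:

// Streaming-fetch sketch; setFetchSize(Integer.MIN_VALUE) is a MySQL Connector/J
// specific hint, only honored together with TYPE_FORWARD_ONLY + CONCUR_READ_ONLY.
public static void streamWordMatrix() throws SQLException {
    try (Connection con = DBCPDataSource.getConnection();
            PreparedStatement ps = con.prepareStatement("SELECT * FROM `WordMatrix`",
                    ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)) {
        ps.setFetchSize(Integer.MIN_VALUE); // stream instead of materializing the table
        try (ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                String word1 = rs.getString(1);
                String word2 = rs.getString(2);
                double relationScore = rs.getDouble(3);
                // consume one row at a time; heap usage stays flat for large tables
            }
        }
    }
}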
@@ -29,13 +29,11 @@ import java.io.StringReader;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
-import java.util.Random;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.CountDownLatch;
@@ -56,6 +54,7 @@ public class Datahandler {
     public static final long EXPIRE_TIME_IN_SECONDS = TimeUnit.SECONDS.convert(6, TimeUnit.MINUTES);
     public static final long EXPIRE_TIME_IN_SECONDS1 = TimeUnit.SECONDS.convert(10, TimeUnit.HOURS);
     public static Datahandler instance = new Datahandler();
+    private static volatile Double minDistance;
     private volatile boolean refreshMatrixFromDB;
     private static volatile int secondaryIterator = 0;
     private final ConcurrentMap<Integer, String> stringCache;
@@ -66,6 +65,7 @@ public class Datahandler {
     private final Stopwatch stopwatch;
     private final Stopwatch stopwatch1;
    private ForkJoinPool executor;
+    private static String similar = "";
    private static String shiftReduceParserPath = "edu/stanford/nlp/models/srparser/englishSR.ser.gz";
    private static String sentimentModel = "edu/stanford/nlp/models/sentiment/sentiment.ser.gz";
    private static String lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz";
@@ -278,6 +278,7 @@ public class Datahandler {
         if (stringCache.values().size() > 10 && !refreshMatrixFromDB) {
             ConcurrentMap<Integer, String> stringCachelocal = stringCache;
             int selectUpdate = -1;
+            int iteratorCap = 25;
             LinkedHashMap<String, LinkedHashMap<String, Double>> LHMSMXLocal = lHMSMX;
             int ij2 = 0;
             for (String str : stringCachelocal.values()) {
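The new iteratorCap of 25 drives a sliding window over the string cache: the shared cursor advances 25 entries per pass and wraps before it would overrun, which is the "instead of iterating 1 ... is 25 now" item from the commit message. The arithmetic in isolation, with illustrative names and java.util imports assumed:

// Window-selection sketch mirroring the secondaryIterator/iteratorCap logic below.
static int cursor = 0;

static List<String> nextWindow(List<String> cache) {
    final int iteratorCap = 25;
    if (cursor + iteratorCap >= cache.size()) {
        cursor = 0; // wrap around instead of running past the cache
    }
    int start = cursor;
    cursor += iteratorCap; // the next pass starts 25 entries further on
    return new ArrayList<>(cache.subList(start, Math.min(start + iteratorCap, cache.size())));
}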
@@ -290,67 +291,81 @@ public class Datahandler {
             }
             if (selectUpdate == -1 || selectUpdate + 1 == stringCachelocal.size()) {
                 int valueSize = stringCachelocal.size();
-                if (secondaryIterator + 1 >= valueSize) {
+                if (secondaryIterator + iteratorCap >= valueSize) {
                     secondaryIterator = 0;
                 }
                 selectUpdate = secondaryIterator;
-                secondaryIterator++;
+                secondaryIterator += iteratorCap;
             }
-            final String getStringCacheStr = stringCachelocal.getOrDefault(selectUpdate, null);
-            ConcurrentMap<Integer, SimilarityMatrix> matrixUpdateList = new MapMaker().concurrencyLevel(2).makeMap();
+            final ConcurrentMap<Integer, String> getStringCacheMap = new MapMaker().concurrencyLevel(2).makeMap();
+            for (int i = 0; i < iteratorCap; i++) {
+                getStringCacheMap.put(i, stringCachelocal.get(selectUpdate));
+                selectUpdate++;
+            }
+            ConcurrentMap<Integer, SimilarityMatrix> matrixUpdateMap = new MapMaker().concurrencyLevel(2).makeMap();
             ConcurrentMap<Integer, Future<SimilarityMatrix>> futures = new MapMaker().concurrencyLevel(2).makeMap();
-            stringCachelocal.values().forEach((str1) -> {
-                boolean present = false;
-                LinkedHashMap<String, Double> orDefault = lHMSMX.getOrDefault(getStringCacheStr, null);
-                if (orDefault != null) {
-                    Double orDefault1 = orDefault.getOrDefault(str1, null);
-                    if (orDefault1 != null) {
-                        present = true;
-                    }
-                }
-                if (!present) {
-                    orDefault = lHMSMX.getOrDefault(str1, null);
-                    if (orDefault != null) {
-                        Double orDefault1 = orDefault.getOrDefault(getStringCacheStr, null);
-                        if (orDefault1 != null) {
-                            present = true;
+            getStringCacheMap.values().forEach((getStringCacheStr) -> {
+                stringCachelocal.values().forEach((str1) -> {
+                    if (!getStringCacheStr.equals(str1)) {
+                        boolean present = false;
+                        LinkedHashMap<String, Double> orDefault = lHMSMX.getOrDefault(getStringCacheStr, null);
+                        if (orDefault != null) {
+                            Collection<String> strkeys = orDefault.keySet();
+                            for (String strkey : strkeys) {
+                                if (strkey.equals(str1)) {
+                                    present = true;
+                                    break;
+                                }
+                            }
+                        }
+                        if (!present) {
+                            orDefault = lHMSMX.getOrDefault(str1, null);
+                            if (orDefault != null) {
+                                Collection<String> strkeys = orDefault.keySet();
+                                for (String strkey : strkeys) {
+                                    if (strkey.equals(getStringCacheStr)) {
+                                        present = true;
+                                        break;
+                                    }
+                                }
+                            }
+                        }
+                        if (!present) {
+                            LinkedHashMap<String, Double> orDefault1 = lHMSMX.getOrDefault(getStringCacheStr, null);
+                            if (orDefault1 == null) {
+                                orDefault1 = new LinkedHashMap<String, Double>();
+                            }
+                            orDefault1.put(str1, 0.0);
+                            lHMSMX.put(getStringCacheStr, orDefault1);
+                            SimilarityMatrix SMX = new SimilarityMatrix(getStringCacheStr, str1);
+                            Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(getStringCacheStr, str1, SMX, jmweAnnotationCache.get(getStringCacheStr),
+                                    jmweAnnotationCache.get(str1), pipelineAnnotationCache.get(getStringCacheStr), pipelineAnnotationCache.get(str1),
+                                    pipelineSentimentAnnotationCache.get(getStringCacheStr), pipelineSentimentAnnotationCache.get(str1));
+                            futures.put(futures.size() + 1, executor.submit(worker));
                         }
                     }
-                }
-                if (!present) {
-                    LinkedHashMap<String, Double> orDefault1 = lHMSMX.getOrDefault(getStringCacheStr, null);
-                    if (orDefault1 == null) {
-                        orDefault1 = new LinkedHashMap<String, Double>();
+                });
+                System.out.println("finished worker assignment, futures size: " + futures.size() + "\n");
+                futures.values().parallelStream().forEach((future) -> {
+                    SimilarityMatrix SMX = new SimilarityMatrix("", "");
+                    try {
+                        SMX = future.get(5, TimeUnit.SECONDS);
+                    } catch (InterruptedException | ExecutionException | TimeoutException ex) {
+                        Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
+                        SMX = null;
                     }
-                    orDefault1.put(str1, 0.0);
-                    lHMSMX.put(getStringCacheStr, orDefault1);
-                    SimilarityMatrix SMX = new SimilarityMatrix(getStringCacheStr, str1);
-                    Callable<SimilarityMatrix> worker = new SentimentAnalyzerTest(getStringCacheStr, str1, SMX, jmweAnnotationCache.get(getStringCacheStr),
-                            jmweAnnotationCache.get(str1), pipelineAnnotationCache.get(getStringCacheStr), pipelineAnnotationCache.get(str1),
-                            pipelineSentimentAnnotationCache.get(getStringCacheStr), pipelineSentimentAnnotationCache.get(str1));
-                    futures.put(futures.size() + 1, executor.submit(worker));
-                }
-            });
-            System.out.println("finished worker assignment, futures size: " + futures.size() + "\n");
-            futures.values().parallelStream().forEach((future) -> {
-                SimilarityMatrix SMX = new SimilarityMatrix("", "");
-                try {
-                    SMX = future.get(5, TimeUnit.SECONDS);
-                } catch (InterruptedException | ExecutionException | TimeoutException ex) {
-                    Logger.getLogger(Datahandler.class.getName()).log(Level.SEVERE, null, ex);
-                    SMX = null;
-                }
-                if (SMX != null) {
-                    LinkedHashMap<String, Double> getFuture = lHMSMX.getOrDefault(SMX.getPrimaryString(), null);
-                    getFuture.put(SMX.getSecondaryString(), SMX.getDistance());
-                    lHMSMX.put(SMX.getPrimaryString(), getFuture);
-                    matrixUpdateList.put(matrixUpdateList.size() + 1, SMX);
-                }
+                    if (SMX != null) {
+                        LinkedHashMap<String, Double> getFuture = lHMSMX.getOrDefault(SMX.getPrimaryString(), null);
+                        getFuture.put(SMX.getSecondaryString(), SMX.getDistance());
+                        lHMSMX.put(SMX.getPrimaryString(), getFuture);
+                        matrixUpdateMap.put(matrixUpdateMap.size() + 1, SMX);
+                    }
+                });
             });
             new Thread(() -> {
                 try {
-                    if (!matrixUpdateList.isEmpty()) {
-                        DataMapper.insertSementicMatrixes(matrixUpdateList);
+                    if (!matrixUpdateMap.isEmpty()) {
+                        DataMapper.insertSementicMatrixes(matrixUpdateMap);
                         System.out.println("finished datamapper semetic insert");
                     }
                 } catch (CustomError ex) {
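Each SentimentAnalyzerTest future is now awaited for at most 5 seconds; a comparison that overruns is logged and dropped rather than stalling the whole pass. The timeout idiom on its own, assuming a Future<SimilarityMatrix> obtained from the executor as above:

// Bounded-wait sketch: cap each worker at 5 seconds and discard late results so a
// single slow sentence pair cannot block the update pass.
SimilarityMatrix smx;
try {
    smx = future.get(5, TimeUnit.SECONDS);
} catch (InterruptedException | ExecutionException | TimeoutException ex) {
    smx = null; // dropped for now; the pair can be rescored on a later pass
}
if (smx != null) {
    // merge the scored pair back into lHMSMX / matrixUpdateMap as the code above does
}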
@@ -359,7 +374,6 @@ public class Datahandler {
             }
         }).start();
     }
-
 }
 
     public synchronized void checkIfUpdateStrings(boolean hlStatsMsg) throws CustomError {
@@ -398,7 +412,6 @@ public class Datahandler {
             System.out.println("pre mostSimilarSTR \n");
             String mostSimilarSTR = mostSimilar(str, strArrs);
             if (mostSimilarSTR != null) {
-                System.out.println("mostSimilarSTR; " + mostSimilarSTR + "\n");
                 LinkedHashMap<String, Double> orDefault = LHMSMXLocal.getOrDefault(mostSimilarSTR, null);
                 if (orDefault != null) {
                     for (Entry<String, Double> entrySet : orDefault.entrySet()) {
@@ -452,7 +465,6 @@ public class Datahandler {
                 futureslocal.put(futureslocal.size() + 1, executor.submit(worker));
             }
         });
-        int index = 0;
         futureslocal.values().parallelStream().forEach((future) -> {
             SimilarityMatrix SMX = new SimilarityMatrix("", "");
             try {
@@ -464,15 +476,10 @@ public class Datahandler {
         });
         for (SimilarityMatrix SMX : futurereturn.values()) {
             double distance = SMX.getDistance();
-            /*
-            System.out.println("index: " + index + "\nfutures size: " + futureslocal.values().size() + "\nScore: " + SMX.getDistance() + "\nSecondary: "
-                    + SMX.getSecondaryString() + "\nPrimary: " + SMX.getPrimaryString() + "\n");
-             */
             if (distance > Score) {
                 Score = distance;
                 SMXreturn = SMX;
             }
-            index++;
         }
         System.out.println("Reached end: secondary: " + SMXreturn.getSecondaryString() + "\nPrimarY: " + SMXreturn.getPrimaryString()
                 + "\nScore: " + SMXreturn.getDistance());
@@ -480,30 +487,27 @@ public class Datahandler {
     }
 
     public String mostSimilar(String toBeCompared, ConcurrentMap<Integer, String> concurrentStrings) {
-        int minDistance = 7;
-        String similar = "";
-        List<Future<ConcurrentMap<String, Integer>>> futures = new ArrayList();
-        ConcurrentMap<String, Integer> futuresreturnvalues = new MapMaker().concurrencyLevel(2).makeMap();
+        similar = "";
+        minDistance = 7.5;
         concurrentStrings.values().parallelStream().forEach((str) -> {
-            Callable<ConcurrentMap<String, Integer>> worker = new LevenshteinDistance(toBeCompared, str);
-            futures.add(executor.submit(worker));
-        });
-        futures.parallelStream().forEach((future) -> {
-            try {
-                ConcurrentMap<String, Integer> get = future.get();
-                get.entrySet().forEach((str) -> {
-                    futuresreturnvalues.put(str.getKey(), str.getValue());
-                });
-            } catch (NullPointerException | InterruptedException | ExecutionException ex) {
-                System.out.println("failed future\nex: " + ex.getMessage() + "\n");
+            LevenshteinDistance leven = new LevenshteinDistance(toBeCompared, str);
+            double distance = leven.computeLevenshteinDistance();
+            if (distance < minDistance) {
+                minDistance = distance;
+                System.out.println("distance: " + distance + "\n");
+                similar = str;
             }
         });
-        for (Entry<String, Integer> entritr : futuresreturnvalues.entrySet()) {
-            int distance = entritr.getValue();
-            if (distance < minDistance) {
-                System.out.println("distance: " + distance + "\n");
-                minDistance = distance;
-                similar = entritr.getKey();
+        LinkedHashMap<String, Double> orDefault = lHMSMX.getOrDefault(similar, null);
+        if (orDefault == null) {
+            return null;
+        }
+        Double maxDistance = 0.0;
+        for (Entry<String, Double> defaultEntry : orDefault.entrySet()) {
+            Double value = defaultEntry.getValue();
+            if (value > maxDistance) {
+                maxDistance = value;
+                similar = defaultEntry.getKey();
             }
         }
         return similar;
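The rewritten mostSimilar computes Levenshtein distances inline instead of through Callable futures, but it updates the shared statics similar and minDistance from inside a parallelStream, which can race. A race-free sketch of the same selection using a stream reduction; the 7.5 cutoff and names come from the hunk above, and computeLevenshteinDistance is assumed to return the int the retained DP code produces:

// Alternative sketch: pick the sub-7.5 candidate with the smallest edit distance
// via a reduction, with no shared mutable state touched from parallel threads.
Optional<Map.Entry<String, Integer>> best = concurrentStrings.values().parallelStream()
        .map(str -> new AbstractMap.SimpleEntry<>(str,
                new LevenshteinDistance(toBeCompared, str).computeLevenshteinDistance()))
        .filter(e -> e.getValue() < 7.5)
        .min(Map.Entry.comparingByValue());
String candidate = best.map(Map.Entry::getKey).orElse(null); // null when nothing qualifies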
@@ -15,11 +15,9 @@ import java.util.concurrent.ConcurrentMap;
  *
  * @author install1
  */
-public class LevenshteinDistance implements Callable<ConcurrentMap<String, Integer>> {
+public class LevenshteinDistance {
 
     private CharSequence lhs;
     private CharSequence rhs;
-    private ConcurrentMap<String, Integer> distanceEntry = new MapMaker().concurrencyLevel(2).makeMap();
-
     private static int minimum(int a, int b, int c) {
         return Math.min(Math.min(a, b), c);
@@ -48,25 +46,4 @@ public class LevenshteinDistance implements Callable<ConcurrentMap<String, Integer>> {
         }
         return distance[lhs.length()][rhs.length()];
     }
-
-    @Override
-    public ConcurrentMap<String, Integer> call() {
-        int[][] distance = new int[lhs.length() + 1][rhs.length() + 1];
-        for (int i = 0; i <= lhs.length(); i++) {
-            distance[i][0] = i;
-        }
-        for (int j = 1; j <= rhs.length(); j++) {
-            distance[0][j] = j;
-        }
-        for (int i = 1; i <= lhs.length(); i++) {
-            for (int j = 1; j <= rhs.length(); j++) {
-                distance[i][j] = minimum(
-                        distance[i - 1][j] + 1,
-                        distance[i][j - 1] + 1,
-                        distance[i - 1][j - 1] + ((lhs.charAt(i - 1) == rhs.charAt(j - 1)) ? 0 : 1));
-            }
-        }
-        distanceEntry.put(lhs.toString(), distance[lhs.length()][rhs.length()]);
-        return distanceEntry;
-    }
 }
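With the Callable plumbing removed, LevenshteinDistance is now a plain synchronous helper, which is the "removed Levenshtein Callable implementation" item from the commit message. Usage reduces to:

// Direct use of the slimmed-down class: no executor, no Future, no shared map.
LevenshteinDistance leven = new LevenshteinDistance("first sentence", "second sentence");
int editDistance = leven.computeLevenshteinDistance(); // classic DP, O(lhs.length * rhs.length)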
@@ -28,7 +28,6 @@ import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Date;
 import java.util.List;
 import java.util.Properties;
 import java.util.concurrent.ConcurrentMap;
@@ -67,8 +66,7 @@ public class PipelineJMWESingleton {
         }
         IMWEDetector detector = getDetector(index, detectorName);
         ConcurrentMap<String, Annotation> returnAnnotations = new MapMaker().concurrencyLevel(2).makeMap();
-        Date startDate = new Date();
-        strvalues.parallelStream().forEach(str -> {
+        strvalues.forEach(str -> {
             Annotation annoStr = new Annotation(str);
             returnAnnotations.put(str, annoStr);
         });
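Annotation wrapping here drops from parallelStream to a plain forEach, presumably because constructing an Annotation is cheap (the expensive pipeline work happens only when annotate() runs later), so the fork-join overhead was not paying for itself. The minimal equivalent form:

// Sequential wrapping sketch; the ConcurrentMap comes from MapMaker as above.
strvalues.forEach(str -> returnAnnotations.put(str, new Annotation(str)));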
@@ -38,6 +38,7 @@ import java.io.StringReader;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
+import java.util.OptionalDouble;
 import java.util.Set;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ConcurrentMap;
@@ -95,344 +96,378 @@ public class SentimentAnalyzerTest implements Callable<SimilarityMatrix> {
         Double score = -100.0;
         try {
             List<List<TaggedWord>> taggedwordlist1 = new ArrayList();
             List<List<TaggedWord>> taggedwordlist2 = new ArrayList();
             DocumentPreprocessor tokenizer = new DocumentPreprocessor(new StringReader(str1));
             //noneDelete
             TokenizerFactory<CoreLabel> ptbTokenizerFactory
                     = PTBTokenizer.factory(new CoreLabelTokenFactory(), "untokenizable=firstDelete");
             tokenizer.setTokenizerFactory(ptbTokenizerFactory);
             for (List<HasWord> sentence : tokenizer) {
                 taggedwordlist1.add(model.apply(tagger.tagSentence(sentence)).taggedYield());
             }
             tokenizer = new DocumentPreprocessor(new StringReader(str));
             tokenizer.setTokenizerFactory(ptbTokenizerFactory);
             for (List<HasWord> sentence : tokenizer) {
                 taggedwordlist2.add(model.apply(tagger.tagSentence(sentence)).taggedYield());
             }
             int counter = 0;
             int counter1 = 0;
             counter = taggedwordlist2.stream().map((taggedlist2) -> taggedlist2.size()).reduce(counter, Integer::sum);
             counter1 = taggedwordlist1.stream().map((taggedlist1) -> taggedlist1.size()).reduce(counter1, Integer::sum);
             int overValue = counter >= counter1 ? counter - counter1 : counter1 - counter;
-            overValue *= 16;
+            overValue *= 32;
             score -= overValue;
             ConcurrentMap<Integer, String> tgwlistIndex = new MapMaker().concurrencyLevel(2).makeMap();
             taggedwordlist1.forEach((TGWList) -> {
                 TGWList.forEach((TaggedWord) -> {
                     if (!tgwlistIndex.values().contains(TaggedWord.tag()) && !TaggedWord.tag().equals(":")) {
                         tgwlistIndex.put(tgwlistIndex.size() + 1, TaggedWord.tag());
-                    }
-                });
-            });
-            taggedwordlist1.clear();
-            AtomicInteger runCount = new AtomicInteger(0);
-            taggedwordlist2.forEach((TGWList) -> {
-                TGWList.forEach((TaggedWord) -> {
-                    if (tgwlistIndex.values().contains(TaggedWord.tag())) {
-                        tgwlistIndex.values().remove(TaggedWord.tag());
-                        runCount.getAndIncrement();
-                    }
-                });
-            });
-            tgwlistIndex.clear();
-            taggedwordlist2.clear();
-            score += runCount.get() * 64;
-            ConcurrentMap<Integer, Tree> sentenceConstituencyParseList = new MapMaker().concurrencyLevel(2).makeMap();
-            try {
-                for (CoreMap sentence : pipelineAnnotation1.get(CoreAnnotations.SentencesAnnotation.class)) {
-                    Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
-                    sentenceConstituencyParseList.put(sentenceConstituencyParseList.size(), sentenceConstituencyParse);
                 }
-                for (CoreMap sentence : pipelineAnnotation2.get(CoreAnnotations.SentencesAnnotation.class)) {
-                    Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
-                    GrammaticalStructure gs = gsf.newGrammaticalStructure(sentenceConstituencyParse);
-                    Collection<TypedDependency> allTypedDependencies = gs.allTypedDependencies();
-                    ConcurrentMap<Integer, String> filerTreeContent = new MapMaker().concurrencyLevel(2).makeMap();
-                    for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList.values()) {
-                        Set<Constituent> inT1notT2 = Tdiff.markDiff(sentenceConstituencyParse, sentenceConstituencyParse1);
-                        Set<Constituent> inT2notT1 = Tdiff.markDiff(sentenceConstituencyParse1, sentenceConstituencyParse);
-                        ConcurrentMap<Integer, String> constiLabels = new MapMaker().concurrencyLevel(2).makeMap();
-                        for (Constituent consti : inT1notT2) {
-                            for (Constituent consti1 : inT2notT1) {
-                                if (consti.value().equals(consti1.value()) && !constiLabels.values().contains(consti.value())) {
-                                    score += 64;
-                                    constiLabels.put(constiLabels.size(), consti.value());
-                                }
-                            }
-                        }
-                        GrammaticalStructure gs1 = gsf.newGrammaticalStructure(sentenceConstituencyParse1);
-                        Collection<TypedDependency> allTypedDependencies1 = gs1.allTypedDependencies();
-                        for (TypedDependency TDY1 : allTypedDependencies1) {
-                            IndexedWord dep = TDY1.dep();
-                            IndexedWord gov = TDY1.gov();
-                            GrammaticalRelation grammaticalRelation = gs.getGrammaticalRelation(gov, dep);
-                            if (grammaticalRelation.isApplicable(sentenceConstituencyParse)) {
-                                score += 900;
-                            }
-                            GrammaticalRelation reln = TDY1.reln();
-                            if (reln.isApplicable(sentenceConstituencyParse)) {
-                                score += 256;
-                            }
-                        }
-                        for (TypedDependency TDY : allTypedDependencies) {
-                            IndexedWord dep = TDY.dep();
-                            IndexedWord gov = TDY.gov();
-                            GrammaticalRelation grammaticalRelation = gs1.getGrammaticalRelation(gov, dep);
-                            if (grammaticalRelation.isApplicable(sentenceConstituencyParse)) {
-                                score += 900;
-                            }
-                            GrammaticalRelation reln = TDY.reln();
-                            if (reln.isApplicable(sentenceConstituencyParse1)) {
-                                score += 256;
-                            }
-                        }
-                        AtomicInteger runCount1 = new AtomicInteger(0);
-                        sentenceConstituencyParse.taggedLabeledYield().forEach((LBW) -> {
-                            sentenceConstituencyParse1.taggedLabeledYield().stream().filter((LBW1) -> (LBW.lemma().equals(LBW1.lemma())
-                                    && !filerTreeContent.values().contains(LBW.lemma()))).map((_item) -> {
-                                filerTreeContent.put(filerTreeContent.size() + 1, LBW.lemma());
-                                return _item;
-                            }).forEachOrdered((_item) -> {
-                                runCount1.getAndIncrement();
-                            });
-                        });
-                        score += runCount1.get() * 1500;
-                    }
+                });
+            });
+            AtomicInteger runCount = new AtomicInteger(0);
+            taggedwordlist2.forEach((TGWList) -> {
+                TGWList.forEach((TaggedWord) -> {
+                    if (tgwlistIndex.values().contains(TaggedWord.tag())) {
+                        tgwlistIndex.values().remove(TaggedWord.tag());
+                        runCount.getAndIncrement();
                 }
-            } catch (Exception ex) {
-                System.out.println("pipelineAnnotation stacktrace: " + ex.getLocalizedMessage() + "\n");
+                });
+            });
+            score += runCount.get() * 64;
+            ConcurrentMap<Integer, Tree> sentenceConstituencyParseList = new MapMaker().concurrencyLevel(2).makeMap();
+            try {
+                for (CoreMap sentence : pipelineAnnotation1.get(CoreAnnotations.SentencesAnnotation.class)) {
+                    Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
+                    sentenceConstituencyParseList.put(sentenceConstituencyParseList.size(), sentenceConstituencyParse);
             }
-            sentenceConstituencyParseList.clear();
-            ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, Integer> sentiment1 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, Integer> sentiment2 = new MapMaker().concurrencyLevel(2).makeMap();
-            for (CoreMap sentence : pipelineAnnotation1Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
-                Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
-                sentiment1.put(sentiment1.size(), RNNCoreAnnotations.getPredictedClass(tree));
-                SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
-                SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
-                simpleSMXlist.put(simpleSMXlist.size(), predictions);
-                simpleSMXlistVector.put(simpleSMXlistVector.size() + 1, nodeVector);
-            }
-            for (CoreMap sentence : pipelineAnnotation2Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
-                Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
-                sentiment2.put(sentiment2.size() + 1, RNNCoreAnnotations.getPredictedClass(tree));
-                SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
-                SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
-                score = simpleSMXlist.values().stream().map((simpleSMX) -> predictions.dot(simpleSMX) * 100).map((dot) -> dot > 50 ? dot - 50 : 50 - dot).map((subtracter) -> {
-                    subtracter *= 25;
-                    return subtracter;
-                }).map((subtracter) -> subtracter).reduce(score, (accumulator, _item) -> accumulator - _item);
-                for (SimpleMatrix simpleSMX : simpleSMXlistVector.values()) {
-                    double dot = nodeVector.dot(simpleSMX);
-                    double elementSum = nodeVector.kron(simpleSMX).elementSum();
-                    elementSum = Math.round(elementSum * 100.0) / 100.0;
-                    if (dot < 0.1) {
-                        score += 256;
+                for (CoreMap sentence : pipelineAnnotation2.get(CoreAnnotations.SentencesAnnotation.class)) {
+                    int constiRelationsize = 0;
+                    Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
+                    GrammaticalStructure gs = gsf.newGrammaticalStructure(sentenceConstituencyParse);
+                    Collection<TypedDependency> allTypedDependencies = gs.allTypedDependencies();
+                    ConcurrentMap<Integer, String> filerTreeContent = new MapMaker().concurrencyLevel(2).makeMap();
+                    for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList.values()) {
+                        Set<Constituent> constinuent1 = Tdiff.markDiff(sentenceConstituencyParse, sentenceConstituencyParse1);
+                        Set<Constituent> constinuent2 = Tdiff.markDiff(sentenceConstituencyParse1, sentenceConstituencyParse);
+                        ConcurrentMap<Integer, String> constiLabels = new MapMaker().concurrencyLevel(2).makeMap();
+                        for (Constituent consti : constinuent1) {
+                            for (Constituent consti1 : constinuent2) {
+                                if (consti.value().equals(consti1.value()) && !constiLabels.values().contains(consti.value())) {
+                                    constiLabels.put(constiLabels.size(), consti.value());
+                                    constiRelationsize++;
+                                }
+                            }
+                        }
                     }
-                    if (elementSum < 0.1 && elementSum > 0.0) {
-                        score += 1300;
-                    } else if (elementSum > 0.1 && elementSum < 1.0) {
-                        score -= 1100;
+                        int constituents1 = constinuent1.size() - constiRelationsize;
+                        int constituents2 = constinuent2.size() - constiRelationsize;
+                        if (constituents1 > 0 && constituents2 > 0) {
+                            score -= (constituents1 + constituents2) * 200;
                     } else {
-                        score -= 1424;
+                            score += constiRelationsize * 200;
                     }
+                        GrammaticalStructure gs1 = gsf.newGrammaticalStructure(sentenceConstituencyParse1);
+                        Collection<TypedDependency> allTypedDependencies1 = gs1.allTypedDependencies();
+                        for (TypedDependency TDY1 : allTypedDependencies1) {
+                            IndexedWord dep = TDY1.dep();
+                            IndexedWord gov = TDY1.gov();
+                            GrammaticalRelation grammaticalRelation = gs.getGrammaticalRelation(gov, dep);
+                            if (grammaticalRelation.isApplicable(sentenceConstituencyParse)) {
+                                score += 1900;
+                            }
+                            GrammaticalRelation reln = TDY1.reln();
+                            if (reln.isApplicable(sentenceConstituencyParse)) {
+                                score += 525;
+                            }
+                        }
+                        for (TypedDependency TDY : allTypedDependencies) {
+                            IndexedWord dep = TDY.dep();
+                            IndexedWord gov = TDY.gov();
+                            GrammaticalRelation grammaticalRelation = gs1.getGrammaticalRelation(gov, dep);
+                            if (grammaticalRelation.isApplicable(sentenceConstituencyParse)) {
+                                score += 900;
+                            }
+                            GrammaticalRelation reln = TDY.reln();
+                            if (reln.isApplicable(sentenceConstituencyParse1)) {
+                                score += 525;
+                            }
+                        }
+                        AtomicInteger runCount1 = new AtomicInteger(0);
+                        sentenceConstituencyParse.taggedLabeledYield().forEach((LBW) -> {
+                            sentenceConstituencyParse1.taggedLabeledYield().stream().filter((LBW1) -> (LBW.lemma().equals(LBW1.lemma())
+                                    && !filerTreeContent.values().contains(LBW.lemma()))).map((_item) -> {
+                                filerTreeContent.put(filerTreeContent.size() + 1, LBW.lemma());
+                                return _item;
+                            }).forEachOrdered((_item) -> {
+                                runCount1.getAndIncrement();
+                            });
+                        });
+                        score += runCount1.get() * 1500;
                 }
             }
-            if (longest1 != longest2) {
-                long deffLongest = longest1 > longest2 ? longest1 : longest2;
-                long deffshorter = longest1 < longest2 ? longest1 : longest2;
-                if (deffLongest >= (deffshorter * 2) - 1 && deffLongest - deffshorter <= 45) {
-                    score += (deffLongest - deffshorter) * 200;
-                } else if (mainSentiment1 != mainSentiment2 && deffLongest - deffshorter > 20 && deffLongest - deffshorter < 45) {
-                    score += (deffLongest - deffshorter) * 200;
+            } catch (Exception ex) {
+                System.out.println("pipelineAnnotation stacktrace: " + ex.getLocalizedMessage() + "\n");
+            }
+            sentenceConstituencyParseList.clear();
+            ConcurrentMap<Integer, SimpleMatrix> simpleSMXlist = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, SimpleMatrix> simpleSMXlistVector = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, Integer> sentiment1 = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, Integer> sentiment2 = new MapMaker().concurrencyLevel(2).makeMap();
+            for (CoreMap sentence : pipelineAnnotation1Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
+                Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
+                sentiment1.put(sentiment1.size(), RNNCoreAnnotations.getPredictedClass(tree));
+                SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
+                SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
+                simpleSMXlist.put(simpleSMXlist.size(), predictions);
+                simpleSMXlistVector.put(simpleSMXlistVector.size() + 1, nodeVector);
+            }
+            ConcurrentMap<Integer, Double> elementSumCounter = new MapMaker().concurrencyLevel(2).makeMap();
+            ConcurrentMap<Integer, Double> dotMap = new MapMaker().concurrencyLevel(2).makeMap();
+            for (CoreMap sentence : pipelineAnnotation2Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
+                Tree tree = sentence.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
+                sentiment2.put(sentiment2.size() + 1, RNNCoreAnnotations.getPredictedClass(tree));
+                SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
+                SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
+                score += simpleSMXlist.values().stream().map((simpleSMX) -> predictions.dot(simpleSMX) * 100).map((dot) -> dot > 50 ? dot - 50 : dot > 0 ? 50 - dot : 50).map((subtracter) -> {
+                    subtracter *= 25; //25
+                    return subtracter;
+                }).map((subtracter) -> subtracter).reduce(score, (accumulator, _item) -> accumulator + _item);
+                for (SimpleMatrix simpleSMX : simpleSMXlistVector.values()) {
+                    double dot = nodeVector.dot(simpleSMX);
+                    double elementSum = nodeVector.kron(simpleSMX).elementSum();
+                    elementSum = Math.round(elementSum * 100.0) / 100.0;
+                    elementSumCounter.put(elementSumCounter.size() + 1, elementSum);
+                    dotMap.put(dotMap.size() + 1, dot);
+                    if (dot < 0.1) {
+                        score += 256;
+                    }
+                    if (dot > 0.50) {
+                        score -= 2400;
+                    }
+                    if (elementSum < 0.01 && elementSum > 0.00) {
+                        score += 1300;
+                    } else if (elementSum > 0.1 && elementSum < 1.0) {
+                        score += 1100;
                 } else {
-                    score -= (deffLongest - deffshorter) * 50;
+                        score -= elementSum * 1424;
                 }
             }
-            int tokensCounter1 = 0;
-            int tokensCounter2 = 0;
-            int anotatorcounter1 = 0;
-            int anotatorcounter2 = 0;
-            int inflectedCounterPositive1 = 0;
-            int inflectedCounterPositive2 = 0;
-            int inflectedCounterNegative = 0;
-            int MarkedContinuousCounter1 = 0;
-            int MarkedContinuousCounter2 = 0;
-            int UnmarkedPatternCounter = 0;
-            ConcurrentMap<Integer, String> ITokenMapTag1 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> ITokenMapTag2 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenStems1 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenStems2 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenForm1 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenForm2 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenGetEntry1 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenGetEntry2 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenGetiPart1 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenGetiPart2 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenEntryPOS1 = new MapMaker().concurrencyLevel(2).makeMap();
-            ConcurrentMap<Integer, String> strTokenEntryPOS2 = new MapMaker().concurrencyLevel(2).makeMap();
-            try {
-                List<CoreMap> sentences = jmweStrAnnotation1.get(CoreAnnotations.SentencesAnnotation.class);
-                for (CoreMap sentence : sentences) {
-                    for (IMWE<IToken> token : sentence.get(JMWEAnnotation.class)) {
-                        if (token.isInflected()) {
-                            inflectedCounterPositive1++;
-                        } else {
-                            inflectedCounterNegative++;
-                        }
-                        strTokenForm1.put(strTokenForm1.size() + 1, token.getForm());
-                        strTokenGetEntry1.put(strTokenGetEntry1.size() + 1, token.getEntry().toString().substring(token.getEntry().toString().length() - 1));
-                        Collection<IMWEDesc.IPart> values = token.getPartMap().values();
-                        IMWEDesc entry = token.getEntry();
-                        MarkedContinuousCounter1 += entry.getMarkedContinuous();
-                        UnmarkedPatternCounter += entry.getUnmarkedPattern();
-                        for (IMWEDesc.IPart iPart : values) {
-                            strTokenGetiPart1.put(strTokenGetiPart1.size() + 1, iPart.getForm());
-                        }
-                        for (String strPostPrefix : entry.getPOS().getPrefixes()) {
-                            strTokenEntryPOS1.put(strTokenEntryPOS1.size() + 1, strPostPrefix);
-                        }
-                        for (IToken tokens : token.getTokens()) {
-                            ITokenMapTag1.put(ITokenMapTag1.size() + 1, tokens.getTag());
-                            for (String strtoken : tokens.getStems()) {
-                                strTokenStems1.put(strTokenStems1.size() + 1, strtoken);
-                            }
-                        }
-                        tokensCounter1++;
-                    }
-                    anotatorcounter1++;
-                }
-                sentences = jmweStrAnnotation2.get(CoreAnnotations.SentencesAnnotation.class);
-                for (CoreMap sentence : sentences) {
-                    for (IMWE<IToken> token : sentence.get(JMWEAnnotation.class)) {
-                        if (token.isInflected()) {
-                            inflectedCounterPositive2++;
-                        } else {
-                            inflectedCounterNegative--;
-                        }
-                        strTokenForm2.put(strTokenForm2.size() + 1, token.getForm());
-                        strTokenGetEntry2.put(strTokenGetEntry2.size() + 1, token.getEntry().toString().substring(token.getEntry().toString().length() - 1));
-                        Collection<IMWEDesc.IPart> values = token.getPartMap().values();
-                        IMWEDesc entry = token.getEntry();
-                        MarkedContinuousCounter2 += entry.getMarkedContinuous();
-                        UnmarkedPatternCounter += entry.getUnmarkedPattern();
-                        for (IMWEDesc.IPart iPart : values) {
-                            strTokenGetiPart2.put(strTokenGetiPart2.size() + 1, iPart.getForm());
-                        }
-                        for (String strPostPrefix : entry.getPOS().getPrefixes()) {
-                            strTokenEntryPOS2.put(strTokenEntryPOS2.size() + 1, strPostPrefix);
-                        }
-                        for (IToken tokens : token.getTokens()) {
-                            ITokenMapTag2.put(ITokenMapTag2.size() + 1, tokens.getTag());
-                            for (String strtoken : tokens.getStems()) {
-                                strTokenStems2.put(strTokenStems2.size() + 1, strtoken);
-                            }
-                        }
-                        tokensCounter2++;
-                    }
-                    anotatorcounter2++;
-                }
-            } catch (Exception ex) {
-                System.out.println("SENTIMENT stacktrace: " + ex.getMessage() + "\n");
+                }
+                if (dotMap.values().size() > 1) {
+                    OptionalDouble minvalueDots = dotMap.values().stream().mapToDouble(Double::doubleValue).min();
+                    OptionalDouble maxvalueDots = dotMap.values().stream().mapToDouble(Double::doubleValue).max();
+                    if (maxvalueDots.getAsDouble() - minvalueDots.getAsDouble() < 0.05) {
+                        score += 3500;
             }
-            for (String strTokenPos1 : strTokenEntryPOS1.values()) {
-                for (String strTokenPos2 : strTokenEntryPOS2.values()) {
-                    if (strTokenPos1.equals(strTokenPos2)) {
-                        score += 500;
+                }
+                if (elementSumCounter.values().size() > 1){
+                    OptionalDouble minvalueElements = elementSumCounter.values().stream().mapToDouble(Double::doubleValue).min();
+                    OptionalDouble maxvalueElements = elementSumCounter.values().stream().mapToDouble(Double::doubleValue).max();
+                    if (maxvalueElements.getAsDouble() - minvalueElements.getAsDouble() < 0.05) {
+                        score += 3500;
+                    }
+                }
+                score -= (sentiment1.size() > sentiment2.size() ? sentiment1.size() - sentiment2.size() : sentiment2.size() - sentiment1.size()) * 500;
+                DocumentReaderAndWriter<CoreLabel> readerAndWriter = classifier.makePlainTextReaderAndWriter();
+                List classifyRaw1 = classifier.classifyRaw(str, readerAndWriter);
+                List classifyRaw2 = classifier.classifyRaw(str1, readerAndWriter);
+                score -= (classifyRaw1.size() > classifyRaw2.size() ? classifyRaw1.size() - classifyRaw2.size() : classifyRaw2.size() - classifyRaw1.size()) * 200;
+                int mainSentiment1 = 0;
+                int longest1 = 0;
+                int mainSentiment2 = 0;
+                int longest2 = 0;
+                for (CoreMap sentence1 : pipelineAnnotation1Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
+                    Tree tree = sentence1.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
+                    int sentiment = RNNCoreAnnotations.getPredictedClass(tree);
+                    String partText = sentence1.toString();
+                    SimpleMatrix predictions1 = RNNCoreAnnotations.getPredictions(tree);
+                    if (partText.length() > longest1) {
+                        mainSentiment1 = sentiment;
+                        longest1 = partText.length();
+                    }
+                }
+                for (CoreMap sentence1 : pipelineAnnotation2Sentiment.get(CoreAnnotations.SentencesAnnotation.class)) {
+                    Tree tree = sentence1.get(SentimentCoreAnnotations.SentimentAnnotatedTree.class);
+                    int sentiment = RNNCoreAnnotations.getPredictedClass(tree);
+                    SimpleMatrix predictions1 = RNNCoreAnnotations.getPredictions(tree);
+                    String partText = sentence1.toString();
+                    if (partText.length() > longest2) {
+                        mainSentiment2 = sentiment;
+                        longest2 = partText.length();
+                    }
+                }
+                if (longest1 != longest2) {
+                    long deffLongest = longest1 > longest2 ? longest1 : longest2;
+                    long deffshorter = longest1 < longest2 ? longest1 : longest2;
+                    //deffLongest >= (deffshorter * 2)
+                    if (deffLongest < (deffshorter * 2) - 1 && deffLongest - deffshorter <= 45) {
+                        score += (deffLongest - deffshorter) * 120;
+                    } else if (mainSentiment1 != mainSentiment2 && deffLongest - deffshorter > 20 && deffLongest - deffshorter < 45) {
+                        score += (deffLongest - deffshorter) * 120;
+                    } else if (deffLongest - deffshorter < 2) {
+                        score += (deffLongest + deffshorter) * 40;
+                    } else if (deffLongest - deffshorter <= 5){
+                        score += 2500;
+                    } else{
+                        score -= (deffLongest - deffshorter) * 50;
+                    }
+                }
+                int tokensCounter1 = 0;
+                int tokensCounter2 = 0;
+                int anotatorcounter1 = 0;
+                int anotatorcounter2 = 0;
+                int inflectedCounterPositive1 = 0;
+                int inflectedCounterPositive2 = 0;
+                int inflectedCounterNegative = 0;
+                int MarkedContinuousCounter1 = 0;
+                int MarkedContinuousCounter2 = 0;
+                int UnmarkedPatternCounter = 0;
+                ConcurrentMap<Integer, String> ITokenMapTag1 = new MapMaker().concurrencyLevel(2).makeMap();
+                ConcurrentMap<Integer, String> ITokenMapTag2 = new MapMaker().concurrencyLevel(2).makeMap();
+                ConcurrentMap<Integer, String> strTokenStems1 = new MapMaker().concurrencyLevel(2).makeMap();
+                ConcurrentMap<Integer, String> strTokenStems2 = new MapMaker().concurrencyLevel(2).makeMap();
+                ConcurrentMap<Integer, String> strTokenForm1 = new MapMaker().concurrencyLevel(2).makeMap();
+                ConcurrentMap<Integer, String> strTokenForm2 = new MapMaker().concurrencyLevel(2).makeMap();
+                ConcurrentMap<Integer, String> strTokenGetEntry1 = new MapMaker().concurrencyLevel(2).makeMap();
+                ConcurrentMap<Integer, String> strTokenGetEntry2 = new MapMaker().concurrencyLevel(2).makeMap();
+                ConcurrentMap<Integer, String> strTokenGetiPart1 = new MapMaker().concurrencyLevel(2).makeMap();
+                ConcurrentMap<Integer, String> strTokenGetiPart2 = new MapMaker().concurrencyLevel(2).makeMap();
+                ConcurrentMap<Integer, String> strTokenEntryPOS1 = new MapMaker().concurrencyLevel(2).makeMap();
+                ConcurrentMap<Integer, String> strTokenEntryPOS2 = new MapMaker().concurrencyLevel(2).makeMap();
+                try {
+                    List<CoreMap> sentences = jmweStrAnnotation1.get(CoreAnnotations.SentencesAnnotation.class);
+                    for (CoreMap sentence1 : sentences) {
+                        for (IMWE<IToken> token : sentence1.get(JMWEAnnotation.class)) {
+                            if (token.isInflected()) {
+                                inflectedCounterPositive1++;
+                            } else {
+                                inflectedCounterNegative++;
                     }
+                            strTokenForm1.put(strTokenForm1.size() + 1, token.getForm());
+                            strTokenGetEntry1.put(strTokenGetEntry1.size() + 1, token.getEntry().toString().substring(token.getEntry().toString().length() - 1));
+                            Collection<IMWEDesc.IPart> values = token.getPartMap().values();
+                            IMWEDesc entry = token.getEntry();
+                            MarkedContinuousCounter1 += entry.getMarkedContinuous();
+                            UnmarkedPatternCounter += entry.getUnmarkedPattern();
+                            for (IMWEDesc.IPart iPart : values) {
+                                strTokenGetiPart1.put(strTokenGetiPart1.size() + 1, iPart.getForm());
+                            }
+                            for (String strPostPrefix : entry.getPOS().getPrefixes()) {
+                                strTokenEntryPOS1.put(strTokenEntryPOS1.size() + 1, strPostPrefix);
+                            }
+                            for (IToken tokens : token.getTokens()) {
+                                ITokenMapTag1.put(ITokenMapTag1.size() + 1, tokens.getTag());
+                                for (String strtoken : tokens.getStems()) {
+                                    strTokenStems1.put(strTokenStems1.size() + 1, strtoken);
+                                }
+                            }
+                            tokensCounter1++;
+                        }
+                        anotatorcounter1++;
+                    }
+                    sentences = jmweStrAnnotation2.get(CoreAnnotations.SentencesAnnotation.class);
+                    for (CoreMap sentence1 : sentences) {
+                        for (IMWE<IToken> token : sentence1.get(JMWEAnnotation.class)) {
+                            if (token.isInflected()) {
+                                inflectedCounterPositive2++;
+                            } else {
+                                inflectedCounterNegative--;
+                            }
+                            strTokenForm2.put(strTokenForm2.size() + 1, token.getForm());
+                            strTokenGetEntry2.put(strTokenGetEntry2.size() + 1, token.getEntry().toString().substring(token.getEntry().toString().length() - 1));
+                            Collection<IMWEDesc.IPart> values = token.getPartMap().values();
+                            IMWEDesc entry = token.getEntry();
+                            MarkedContinuousCounter2 += entry.getMarkedContinuous();
+                            UnmarkedPatternCounter += entry.getUnmarkedPattern();
+                            for (IMWEDesc.IPart iPart : values) {
+                                strTokenGetiPart2.put(strTokenGetiPart2.size() + 1, iPart.getForm());
+                            }
+                            for (String strPostPrefix : entry.getPOS().getPrefixes()) {
+                                strTokenEntryPOS2.put(strTokenEntryPOS2.size() + 1, strPostPrefix);
+                            }
+                            for (IToken tokens : token.getTokens()) {
+                                ITokenMapTag2.put(ITokenMapTag2.size() + 1, tokens.getTag());
+                                for (String strtoken : tokens.getStems()) {
+                                    strTokenStems2.put(strTokenStems2.size() + 1, strtoken);
+                                }
+                            }
+                            tokensCounter2++;
+                        }
+                        anotatorcounter2++;
+                    }
+                } catch (Exception ex) {
+                    System.out.println("SENTIMENT stacktrace: " + ex.getMessage() + "\n");
+                }
+                for (String strTokenPos1 : strTokenEntryPOS1.values()) {
+                    for (String strTokenPos2 : strTokenEntryPOS2.values()) {
+                        if (strTokenPos1.equals(strTokenPos2)) {
+                            score += 500;
                 }
             }
+                }
+                if (UnmarkedPatternCounter > 0 && UnmarkedPatternCounter < 5) {
             score += UnmarkedPatternCounter * 1600;
-            if (MarkedContinuousCounter1 > 0 && MarkedContinuousCounter2 > 0) {
-                score += MarkedContinuousCounter1 > MarkedContinuousCounter2 ? (MarkedContinuousCounter1 - MarkedContinuousCounter2) * 500
-                        : (MarkedContinuousCounter2 - MarkedContinuousCounter1) * 500;
-            }
-            for (String strTokeniPart1 : strTokenGetiPart1.values()) {
-                for (String strTokeniPart2 : strTokenGetiPart2.values()) {
-                    if (strTokeniPart1.equals(strTokeniPart2)) {
-                        score += 400;
-                    }
+-            if (MarkedContinuousCounter1 > 0 && MarkedContinuousCounter2 > 0) {
+-                score += MarkedContinuousCounter1 > MarkedContinuousCounter2 ? (MarkedContinuousCounter1 - MarkedContinuousCounter2) * 500
+-                        : (MarkedContinuousCounter2 - MarkedContinuousCounter1) * 500;
+-            }
+-            for (String strTokeniPart1 : strTokenGetiPart1.values()) {
+-                for (String strTokeniPart2 : strTokenGetiPart2.values()) {
+-                    if (strTokeniPart1.equals(strTokeniPart2)) {
+-                        score += 400;
+-                    }
+:                }
+:                if (MarkedContinuousCounter1 > 0 && MarkedContinuousCounter2 > 0) {
+:                    score += MarkedContinuousCounter1 > MarkedContinuousCounter2 ? (MarkedContinuousCounter1 - MarkedContinuousCounter2) * 500
+:                            : (MarkedContinuousCounter2 - MarkedContinuousCounter1) * 500;
+:                }
+:                for (String strTokeniPart1 : strTokenGetiPart1.values()) {
+:                    for (String strTokeniPart2 : strTokenGetiPart2.values()) {
+:                        if (strTokeniPart1.equals(strTokeniPart2)) {
+:                            score += 400;
                 }
             }
-            for (String strTokenEntry1 : strTokenGetEntry1.values()) {
-                for (String strTokenEntry2 : strTokenGetEntry2.values()) {
-                    if (strTokenEntry1.equals(strTokenEntry2)) {
-                        score += 2500;
-                    }
+-            for (String strTokenEntry1 : strTokenGetEntry1.values()) {
+-                for (String strTokenEntry2 : strTokenGetEntry2.values()) {
+-                    if (strTokenEntry1.equals(strTokenEntry2)) {
+-                        score += 2500;
+-                    }
+:                }
+:                for (String strTokenEntry1 : strTokenGetEntry1.values()) {
+:                    for (String strTokenEntry2 : strTokenGetEntry2.values()) {
+:                        if (strTokenEntry1.equals(strTokenEntry2)) {
+:                            score += 2500;
                 }
             }
-            for (String strmapTag : ITokenMapTag1.values()) {
-                for (String strmapTag1 : ITokenMapTag2.values()) {
-                    if (strmapTag.equals(strmapTag1)) {
-                        score += 1450;
-                    }
+-            for (String strmapTag : ITokenMapTag1.values()) {
+-                for (String strmapTag1 : ITokenMapTag2.values()) {
+-                    if (strmapTag.equals(strmapTag1)) {
+-                        score += 1450;
+-                    }
+:                }
+:                for (String strmapTag : ITokenMapTag1.values()) {
+:                    for (String strmapTag1 : ITokenMapTag2.values()) {
+:                        if (strmapTag.equals(strmapTag1)) {
+:                            score += 1450;
                 }
             }
-            for (String strTokenForm1itr1 : strTokenForm1.values()) {
-                for (String strTokenForm1itr2 : strTokenForm2.values()) {
-                    if (strTokenForm1itr1.equals(strTokenForm1itr2)) {
-                        score += 2600;
-                    } else if (strTokenForm1itr1.contains(strTokenForm1itr2)) {
-                        score += 500;
-                    }
+-            for (String strTokenForm1itr1 : strTokenForm1.values()) {
+-                for (String strTokenForm1itr2 : strTokenForm2.values()) {
+-                    if (strTokenForm1itr1.equals(strTokenForm1itr2)) {
+-                        score += 2600;
+-                    } else if (strTokenForm1itr1.contains(strTokenForm1itr2)) {
+-                        score += 500;
+-                    }
+:                }
+:                for (String strTokenForm1itr1 : strTokenForm1.values()) {
+:                    for (String strTokenForm1itr2 : strTokenForm2.values()) {
+:                        if (strTokenForm1itr1.equals(strTokenForm1itr2)) {
+:                            score += 2600;
+:                        } else if (strTokenForm1itr1.contains(strTokenForm1itr2)) {
+:                            score += 500;
                 }
             }
-            for (String strTokenStem : strTokenStems1.values()) {
-                for (String strTokenStem1 : strTokenStems2.values()) {
-                    if (strTokenStem.equals(strTokenStem1)) {
-                        score += 1500;
-                    }
+-            for (String strTokenStem : strTokenStems1.values()) {
+-                for (String strTokenStem1 : strTokenStems2.values()) {
+-                    if (strTokenStem.equals(strTokenStem1)) {
+-                        score += 1500;
+-                    }
+:                }
+:                for (String strTokenStem : strTokenStems1.values()) {
+:                    for (String strTokenStem1 : strTokenStems2.values()) {
+:                        if (strTokenStem.equals(strTokenStem1)) {
+:                            score += 1500;
                 }
             }
-            if (inflectedCounterPositive1 + inflectedCounterPositive2 > inflectedCounterNegative && inflectedCounterNegative > 0) {
-                score += (inflectedCounterPositive1 - inflectedCounterNegative) * 650;
-            }
-            if (inflectedCounterPositive1 > 0 && inflectedCounterPositive2 > 0) {
-                score += ((inflectedCounterPositive1 + inflectedCounterPositive2) - inflectedCounterNegative) * 550;
-            }
-            if (anotatorcounter1 > 1 && anotatorcounter2 > 1) {
-                score += (anotatorcounter1 + anotatorcounter2) * 400;
-            }
-            if (tokensCounter1 > 0 && tokensCounter2 > 0) {
-                score += (tokensCounter1 + tokensCounter2) * 400;
-            } else {
-                score -= tokensCounter1 >= tokensCounter2 ? (tokensCounter1 - tokensCounter2) * 500 : (tokensCounter2 - tokensCounter1) * 500;
-            }
-            LevenshteinDistance leven = new LevenshteinDistance(str, str1);
-            double SentenceScoreDiff = leven.computeLevenshteinDistance();
-            SentenceScoreDiff *= 15;
-            score -= SentenceScoreDiff;
+-            if (inflectedCounterPositive1 + inflectedCounterPositive2 > inflectedCounterNegative && inflectedCounterNegative > 0) {
+-                score += (inflectedCounterPositive1 - inflectedCounterNegative) * 650;
+-            }
+-            if (inflectedCounterPositive1 > 0 && inflectedCounterPositive2 > 0) {
+-                score += ((inflectedCounterPositive1 + inflectedCounterPositive2) - inflectedCounterNegative) * 550;
+-            }
+-            if (anotatorcounter1 > 1 && anotatorcounter2 > 1) {
+-                score += (anotatorcounter1 + anotatorcounter2) * 400;
+-            }
+-            if (tokensCounter1 > 0 && tokensCounter2 > 0) {
+-                score += (tokensCounter1 + tokensCounter2) * 400;
+-            } else {
+-                score -= tokensCounter1 >= tokensCounter2 ? (tokensCounter1 - tokensCounter2) * 500 : (tokensCounter2 - tokensCounter1) * 500;
+-            }
+-            LevenshteinDistance leven = new LevenshteinDistance(str, str1);
+-            double SentenceScoreDiff = leven.computeLevenshteinDistance();
+-            SentenceScoreDiff *= 15;
+-            score -= SentenceScoreDiff;
+:                }
+:                if (inflectedCounterPositive1 + inflectedCounterPositive2 > inflectedCounterNegative && inflectedCounterNegative > 0) {
+:                    score += (inflectedCounterPositive1 - inflectedCounterNegative) * 650;
+:                }
+:                if (inflectedCounterPositive1 > 0 && inflectedCounterPositive2 > 0) {
+:                    score += ((inflectedCounterPositive1 + inflectedCounterPositive2) - inflectedCounterNegative) * 550;
+:                }
+:                if (anotatorcounter1 > 1 && anotatorcounter2 > 1) {
+:                    score += (anotatorcounter1 + anotatorcounter2) * 400;
+:                }
+:                if (tokensCounter1 > 0 && tokensCounter2 > 0) {
+:                    score += (tokensCounter1 + tokensCounter2) * 400;
+:                } else {
+:                    int elseint = tokensCounter1 >= tokensCounter2 ? (tokensCounter1 - tokensCounter2) * 500 : (tokensCounter2 - tokensCounter1) * 500;
+:                    score -= elseint;
+:                }
+:                LevenshteinDistance leven = new LevenshteinDistance(str, str1);
+:                double SentenceScoreDiff = leven.computeLevenshteinDistance();
+:                SentenceScoreDiff *= 15;
+:                score -= SentenceScoreDiff;
         } catch (Exception ex) {
             System.out.println("SENTIMENT stacktrace Overall catch: " + ex.getMessage() + "\n");
         }
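The reworked scoring above buffers every RNN dot product and Kronecker element sum into dotMap and elementSumCounter, then rewards tight clustering: if all collected values sit within 0.05 of each other, the pair earns a flat +3500. That spread check in isolation, assuming java.util and java.util.stream imports:

// Spread-check sketch mirroring the dotMap / elementSumCounter logic in the hunk:
// a min-max spread under 0.05 is treated as "near-identical in RNN space".
static boolean clusteredWithin(Collection<Double> values, double epsilon) {
    if (values.size() <= 1) {
        return false; // nothing to compare against
    }
    OptionalDouble min = values.stream().mapToDouble(Double::doubleValue).min();
    OptionalDouble max = values.stream().mapToDouble(Double::doubleValue).max();
    return max.getAsDouble() - min.getAsDouble() < epsilon;
}
// usage: if (clusteredWithin(dotMap.values(), 0.05)) { score += 3500; }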
@@ -21,7 +21,6 @@ import FunctionLayer.PipelineJMWESingleton;
 import java.io.IOException;
 import java.sql.SQLException;
 import java.util.List;
-import java.util.concurrent.CountDownLatch;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import org.javacord.api.DiscordApi;