From cc6aed28f96cdd8629d98016a537e338551fa4e4 Mon Sep 17 00:00:00 2001
From: jenz
Date: Sun, 5 Jan 2025 01:45:08 +0100
Subject: [PATCH] first commit

---
 ArtificialAutism.iml | 8 +
 NOTES.txt | 16 +
 app.config | 13 +
 create_table_exhaustive.sql | 464 +++
 create_table_sentence_testing.sql | 9 +
 discord_autismbot.service | 16 +
 dynamicScore.json | 485 +++
 insert_table_sentence_testing.sql | 136 +
 pom.xml | 226 ++
 src/main/java/DataLayer/DBCPDataSource.java | 55 +
 src/main/java/DataLayer/DataMapper.java | 660 ++++
 src/main/java/DataLayer/RunnerClient.java | 31 +
 src/main/java/DataLayer/ThreadClient.java | 109 +
 src/main/java/DataLayer/settings.java | 21 +
 src/main/java/DataLayer/testClasses.java | 51 +
 src/main/java/FunctionLayer/Datahandler.java | 926 +++++
 .../FunctionLayer/LevenshteinDistance.java | 43 +
 .../FunctionLayer/PipelineJMWESingleton.java | 157 +
 .../java/FunctionLayer/SimilarityMatrix.java | 32 +
 .../SentimentAnalyzerTestDynamicTesting.java | 3096 +++++++++++++++++
 .../java/FunctionLayer/StopwordAnnotator.java | 108 +
 src/main/java/META-INF/MANIFEST.MF | 3 +
 .../PresentationLayer/DiscordHandler.java | 114 +
 src/test/java/TestJunit.java | 560 +++
 24 files changed, 7339 insertions(+)
 create mode 100644 ArtificialAutism.iml
 create mode 100644 NOTES.txt
 create mode 100644 app.config
 create mode 100644 create_table_exhaustive.sql
 create mode 100644 create_table_sentence_testing.sql
 create mode 100644 discord_autismbot.service
 create mode 100644 dynamicScore.json
 create mode 100644 insert_table_sentence_testing.sql
 create mode 100644 pom.xml
 create mode 100644 src/main/java/DataLayer/DBCPDataSource.java
 create mode 100644 src/main/java/DataLayer/DataMapper.java
 create mode 100644 src/main/java/DataLayer/RunnerClient.java
 create mode 100644 src/main/java/DataLayer/ThreadClient.java
 create mode 100755 src/main/java/DataLayer/settings.java
 create mode 100644 src/main/java/DataLayer/testClasses.java
 create mode 100644 src/main/java/FunctionLayer/Datahandler.java
 create mode 100644 src/main/java/FunctionLayer/LevenshteinDistance.java
 create mode 100644 src/main/java/FunctionLayer/PipelineJMWESingleton.java
 create mode 100644 src/main/java/FunctionLayer/SimilarityMatrix.java
 create mode 100644 src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTestDynamicTesting.java
 create mode 100644 src/main/java/FunctionLayer/StopwordAnnotator.java
 create mode 100644 src/main/java/META-INF/MANIFEST.MF
 create mode 100644 src/main/java/PresentationLayer/DiscordHandler.java
 create mode 100644 src/test/java/TestJunit.java

diff --git a/ArtificialAutism.iml b/ArtificialAutism.iml
new file mode 100644
index 0000000..214891f
--- /dev/null
+++ b/ArtificialAutism.iml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/NOTES.txt b/NOTES.txt
new file mode 100644
index 0000000..072ca51
--- /dev/null
+++ b/NOTES.txt
@@ -0,0 +1,16 @@
+for running this you need to include the Stanford parser:
+https://nlp.stanford.edu/software/lex-parser.shtml#Download
+
+if you want to run it on a remote machine with the current POM setup you need to include all jars, which
+also means the Stanford parser jars, around 620 MB in total; this jar is currently not included in the uploaded /libs folder
+because it is too large. Download it from the link above and put it in the /libs folder of the remote machine to run it there.
+
+now also requires: https://nlp.stanford.edu/software/stanford-srparser-2014-10-23-models.jar
+(quite large)
+
+now requires at least about 4 GB of RAM to run.
+also now requires nlp.stanford.edu/software/stanford-corenlp-full-2018-10-05.zip
+because it needs lexicalized parsers, which the shift-reduce parser does not possess.
+the regular Stanford parser is also required, not just the shift-reduce parser
+
+jars too large for the /lib folder: CorenlpModels-1.0, srParser-1, ws4j-1.0.1, StanfordParserModel-1.0
\ No newline at end of file
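The app.config added next is a plain key=value properties file. As a minimal sketch of how such a file can be loaded, assuming java.util.Properties (the repository's DataLayer/settings.java presumably does this, but it is not shown in full in this patch, so the class and method names below are illustrative, not taken from the source):

import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

// Illustrative loader for app.config; "ConfigSketch" and "load" are assumed names.
public class ConfigSketch {
    public static Properties load(String path) throws IOException {
        Properties props = new Properties();
        try (FileInputStream in = new FileInputStream(path)) {
            props.load(in); // app.config uses plain key=value lines
        }
        return props;
    }

    public static void main(String[] args) throws IOException {
        Properties props = load("app.config");
        String url = props.getProperty("app.url");                          // JDBC URL for MySQL
        int threads = Integer.parseInt(props.getProperty("app.thread_count"));
        System.out.println(url + " with " + threads + " threads");
    }
}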
diff --git a/app.config b/app.config
new file mode 100644
index 0000000..14154e7
--- /dev/null
+++ b/app.config
@@ -0,0 +1,13 @@
+app.url=jdbc:mysql://localhost:3306/databasename?useSSL=false&useLegacyDatetimeCode=false&serverTimezone=UTC&allowPublicKeyRetrieval=True
+app.username=
+app.password=
+app.hostip=
+app.hostip2=
+app.hostport=
+app.hostport2=
+app.discordtoken=
+app.interval_days=4
+app.string_count=14000
+app.thread_count=4
+app.interval_days_minus=4
+app.random_length=2.5
diff --git a/create_table_exhaustive.sql b/create_table_exhaustive.sql
new file mode 100644
index 0000000..d083836
--- /dev/null
+++ b/create_table_exhaustive.sql
@@ -0,0 +1,464 @@
+create table ArtificialAutism.exhaustive_params(
+    rowid int primary key auto_increment,
+    stopwordTokenPairCounterScoring_param1 int ,
+stopwordTokenPairCounterScoring_param2 int ,
+stopwordTokenPairCounterScoring_param3 int ,
+stopwordTokenPairCounterScoring_param4 int ,
+stopwordTokenPairCounterScoring_param5 int ,
+stopwordTokenPairCounterScoring_param6 int ,
+stopwordTokenPairCounterScoring_param7 int ,
+stopwordTokenPairCounterScoring_param8 int ,
+stopwordTokenPairCounterScoring_param9 int ,
+stopwordTokenPairCounterScoring_param10 int ,
+stopwordTokenPairCounterScoring_param11 int ,
+stopwordTokenPairCounterScoring_param12 int ,
+stopwordTokenPairCounterScoring_param13 int ,
+stopwordTokenPairCounterScoring_param14 int ,
+stopwordTokenPairCounterScoring_param15 int ,
+stopwordTokenPairCounterScoring_param16 int ,
+stopwordTokenPairCounterScoring_param17 int ,
+stopwordTokenPairCounterScoring_param18 int ,
+stopwordTokenPairCounterScoring_param19 int ,
+stopwordTokenPairCounterScoring_param20 int ,
+stopwordTokenPairCounterScoring_param21 int ,
+stopwordTokenPairCounterScoring_param22 int ,
+stopwordTokenPairCounterScoring_param23 int ,
+stopWordTokenLemmaScoring_param1 int ,
+stopWordTokenLemmaScoring_param2 int ,
+stopWordTokenLemmaScoring_param3 int ,
+stopWordTokenLemmaScoring_param4 int ,
+stopWordTokenLemmaScoring_param5 int ,
+stopWordTokenLemmaScoring_param6 int ,
+stopWordTokenLemmaScoring_param7 int ,
+stopWordTokenLemmaScoring_param8 int ,
+stopWordTokenLemmaScoring_param9 int ,
+stopWordTokenLemmaScoring_param10 int ,
+stopWordTokenLemmaScoring_param11 int ,
+stopWordTokenLemmaScoring_param12 int ,
+stopWordTokenLemmaScoring_param13 int ,
+stopWordTokenLemmaScoring_param14 int ,
+stopWordTokenLemmaScoring_param15 int ,
+stopWordTokenLemmaScoring_param16 int ,
+stopWordTokenLemmaScoring_param17 int ,
+nerEntitiesAndTokenScoring_param1 int ,
+nerEntitiesAndTokenScoring_param2 int ,
+SentenceScoreDiff_param1 int ,
+tokensCounterScoring_param1 int ,
+tokensCounterScoring_param2 int ,
+tokensCounterScoring_param3 int ,
+tokensCounterScoring_param4 int ,
+tokensCounterScoring_param5 int ,
+tokensCounterScoring_param6 int ,
+tokensCounterScoring_param7 int ,
+tokensCounterScoring_param8 int ,
+tokensCounterScoring_param9 int ,
+tokensCounterScoring_param10 int ,
+tokensCounterScoring_param11 int ,
+tokensCounterScoring_param12 int ,
+tokensCounterScoring_param13 int ,
+tokensCounterScoring_param14 int ,
+tokensCounterScoring_param15 int ,
+tokensCounterScoring_param16 int , +tokensCounterScoring_param17 int , +tokensCounterScoring_param18 int , +tokensCounterScoring_param19 int , +tokensCounterScoring_param20 int , +tokensCounterScoring_param21 int , +tokensCounterScoring_param22 int , +tokensCounterScoring_param23 int , +tokensCounterScoring_param24 int , +tokensCounterScoring_param25 int , +tokensCounterScoring_param26 int , +tokensCounterScoring_param27 int , +annotatorCountScoring_param1 int , +annotatorCountScoring_param2 int , +annotatorCountScoring_param3 int , +annotatorCountScoring_param4 int , +annotatorCountScoring_param5 int , +inflectedCounterScoring_param1 int , +inflectedCounterScoring_param2 int , +inflectedCounterScoring_param3 int , +inflectedCounterScoring_param4 int , +inflectedCounterScoring_param5 int , +inflectedCounterScoring_param6 int , +inflectedCounterScoring_param7 int , +inflectedCounterScoring_param8 int , +inflectedCounterScoring_param9 int , +inflectedCounterScoring_param10 int , +inflectedCounterScoring_param11 int , +tokenStemmingMapScoring_param1 int , +tokenformSizeScoring_param1 int , +tokenformSizeScoring_param2 int , +tokenformSizeScoring_param3 int , +tokenformSizeScoring_param4 int , +tokenformSizeScoring_param5 int , +tokenformSizeScoring_param6 int , +tokenformSizeScoring_param7 int , +tokenformSizeScoring_param8 int , +tokenformSizeScoring_param9 int , +strTokenMapTagsScoring_param1 int , +strTokenMapTagsScoring_param2 int , +strTokenMapTagsScoring_param3 int , +strTokenMapTagsScoring_param4 int , +strTokenMapTagsScoring_param5 int , +strTokenMapTagsScoring_param6 int , +strTokenEntryScoring_param1 int , +strTokenEntryScoring_param2 int , +strTokenEntryScoring_param3 int , +strTokenEntryScoring_param4 int , +strTokenEntryScoring_param5 int , +strTokenEntryScoring_param6 int , +strTokenEntryScoring_param7 int , +strTokenEntryScoring_param8 int , +strTokenEntryScoring_param9 int , +strTokenEntryScoring_param10 int , +strTokenEntryScoring_param11 int , +strTokensMapScoring_param1 int , +strTokensMapScoring_param2 int , +strTokensMapScoring_param3 int , +strTokensMapScoring_param4 int , +strTokensMapScoring_param5 int , +strTokensMapScoring_param6 int , +markedContiniousCounterScoring_param1 int , +markedContiniousCounterScoring_param2 int , +markedContiniousCounterScoring_param3 int , +markedContiniousCounterScoring_param4 int , +markedContiniousCounterScoring_param5 int , +markedContiniousCounterScoring_param6 int , +markedContiniousCounterScoring_param7 int , +markedContiniousCounterScoring_param8 int , +markedContiniousCounterScoring_param9 int , +markedContiniousCounterScoring_param10 int , +markedContiniousCounterScoring_param11 int , +unmarkedPatternCounterScoring_param1 int , +unmarkedPatternCounterScoring_param2 int , +unmarkedPatternCounterScoring_param3 int , +unmarkedPatternCounterScoring_param4 int , +unmarkedPatternCounterScoring_param5 int , +unmarkedPatternCounterScoring_param6 int , +unmarkedPatternCounterScoring_param7 int , +unmarkedPatternCounterScoring_param8 int , +tokenEntryPosScoring_param1 int , +tokenEntryPosScoring_param2 int , +tokenEntryPosScoring_param3 int , +tokenEntryPosScoring_param4 int , +entryCountsScoring_param1 int , +entryCountsScoring_param2 int , +entryCountsScoring_param3 int , +entryCountsScoring_param4 int , +entryCountsScoring_param5 int , +entryCountsScoring_param6 int , +entryCountsScoring_param7 int , +entryCountsScoring_param8 int , +entryCountsRelation_param1 int , +entryCountsRelation_param2 int , +entryCountsRelation_param3 int , 
+entryCountsRelation_param4 int , +entryCountsRelation_param5 int , +entryCountsRelation_param6 int , +entryCountsRelation_param7 int , +entryCountsRelation_param8 int , +entryCountsRelation_param9 int , +entryCountsRelation_param10 int , +entryCountsRelation_param11 int , +entryCountsRelation_param12 int , +entryCountsRelation_param13 int , +entryCountsRelation_param14 int , +entryCountsRelation_param15 int , +sentimentMatrixVariances_param1 int , +sentimentMatrixVariances_param2 int , +sentimentMatrixVariances_param3 int , +sentimentMatrixVariances_param4 int , +sentimentMatrixVariances_param5 int , +sentimentMatrixVariances_param6 int , +sentimentMatrixVariances_param7 int , +sentimentMatrixVariances_param8 int , +sentimentMatrixVariances_param9 int , +sentimentMatrixVariances_param10 int , +sentimentMatrixVariances_param11 int , +sentimentMatrixVariances_param12 int , +sentimentMatrixVariances_param13 int , +sentimentMatrixVariances_param14 int , +sentimentMatrixVariances_param15 int , +sentimentMatrixVariances_param16 int , +sentimentMatrixVariances_param17 int , +sentimentMatrixVariances_param18 int , +sentimentMatrixVariances_param19 int , +sentimentMatrixVariances_param20 int , +sentimentMatrixVariances_param21 int , +sentimentMatrixVariances_param22 int , +sentimentMatrixVariances_param23 int , +sentimentMatrixVariances_param24 int , +sentimentMatrixVariances_param25 int , +sentimentMatrixVariances_param26 int , +sentimentMatrixVariances_param27 int , +sentimentMatrixVariances_param28 int , +sentimentMatrixVariances_param29 int , +sentimentMatrixVariances_param30 int , +sentimentMatrixVariances_param31 int , +sentimentMatrixVariances_param32 int , +sentimentMatrixVariances_param33 int , +sentimentMatrixVariances_param34 int , +sentimentMatrixVariances_param35 int , +sentimentMatrixVariances_param36 int , +sentimentMatrixVariances_param37 int , +classifyRawEvaluation_param1 int , +classifyRawEvaluation_param2 int , +classifyRawEvaluation_param3 int , +sentiment1_param1 int , +simpleRNNMaxtrixVectors_param1 int , +simpleRNNMaxtrixVectors_param2 int , +simpleRNNMaxtrixVectors_param3 int , +simpleRNNMaxtrixVectors_param4 int , +simpleRNNMaxtrixVectors_param5 int , +simpleRNNMaxtrixVectorsDouble_param6 decimal(10,5), +simpleRNNMaxtrixVectorsDouble_param7 decimal(10,5), +simpleRNNMaxtrixVectors_param8 int , +simpleRNNMaxtrixVectorsDouble_param9 decimal(10,5), +simpleRNNMaxtrixVectorsDouble_param10 decimal(10,5), +simpleRNNMaxtrixVectors_param11 int , +simpleRNNMaxtrixVectorsDouble_param12 decimal(10,5), +simpleRNNMaxtrixVectorsDouble_param13 decimal(10,5), +simpleRNNMaxtrixVectors_param14 int , +simpleRNNMaxtrixVectors_param15 int , +simpleRNNMaxtrixVectors_param16 int , +simpleRNNMaxtrixVectors_param17 int , +simpleRNNMaxtrixVectors_param18 int , +simpleRNNMaxtrixVectors_param19 int , +simpleRNNMaxtrixVectors_param20 int , +simpleRNNMaxtrixVectors_param21 int , +simpleRNNMaxtrixVectors_param22 int , +simpleRNNMaxtrixVectors_param23 int , +simpleRNNMaxtrixVectors_param24 int , +simpleRNNMaxtrixVectors_param25 int , +simpleRNNMaxtrixVectors_param26 int , +simpleRNNMaxtrixVectors_param27 int , +simpleRNNMaxtrixVectors_param28 int , +simpleRNNMaxtrixVectors_param29 int , +simpleRNNMaxtrixVectors_param30 int , +simpleRNNMaxtrixVectors_param31 int , +simpleRNNMaxtrixVectors_param32 int , +simpleRNNMaxtrixVectors_param33 int , +simpleRNNMaxtrixVectorsDouble_param34 decimal(10,5), +simpleRNNMaxtrixVectorsDouble_param35 decimal(10,5), +simpleRNNMaxtrixVectors_param36 int , 
+simpleRNNMaxtrixVectorsDouble_param37 decimal(10,5), +simpleRNNMaxtrixVectorsDouble_param38 decimal(10,5), +simpleRNNMaxtrixVectors_param39 int , +simpleRNNMaxtrixVectors_param40 int , +simpleRNNMaxtrixVectors_param41 int , +simpleRNNMaxtrixVectors_param42 int , +simpleRNNMaxtrixVectors_param43 int , +simpleRNNMaxtrixVectors_param44 int , +simpleRNNMaxtrixVectors_param45 int , +simpleRNNMaxtrixVectors_param46 int , +simpleRNNMaxtrixVectors_param47 int , +simpleRNNMaxtrixVectors_param48 int , +simpleRNNMaxtrixVectors_param49 int , +simpleRNNMaxtrixVectorsDouble_param50 decimal(10,5), +simpleRNNMaxtrixVectors_param51 int , +simpleRNNMaxtrixVectorsDouble_param52 decimal(10,5), +simpleRNNMaxtrixVectors_param53 int , +simpleRNNMaxtrixVectorsDouble_param54 decimal(10,5), +simpleRNNMaxtrixVectors_param55 int , +simpleRNNMaxtrixVectorsDouble_param56 decimal(10,5), +simpleRNNMaxtrixVectorsDouble_param57 decimal(10,5), +simpleRNNMaxtrixVectors_param58 int , +simpleRNNMaxtrixVectorsDouble_param59 decimal(10,5), +simpleRNNMaxtrixVectorsDouble_param60 decimal(10,5), +simpleRNNMaxtrixVectors_param61 int , +simpleRNNMaxtrixVectorsDouble_param62 decimal(10,5), +simpleRNNMaxtrixVectorsDouble_param63 decimal(10,5), +simpleRNNMaxtrixVectors_param64 int , +simpleRNNMaxtrixVectorsDouble_param65 decimal(10,5), +simpleRNNMaxtrixVectorsDouble_param66 decimal(10,5), +simpleRNNMaxtrixVectors_param67 int , +simpleRNNMaxtrixVectors_param68 int , +simpleRNNMaxtrixVectors_param69 int , +simpleRNNMaxtrixVectorsDouble_param70 decimal(10,5), +simpleRNNMaxtrixVectorsDouble_param71 decimal(10,5), +simpleRNNMaxtrixVectors_param72 int , +simpleRNNMaxtrixVectorsDouble_param73 decimal(10,5), +simpleRNNMaxtrixVectorsDouble_param74 decimal(10,5), +simpleRNNMaxtrixVectors_param75 int , +simpleRNNMaxtrixVectors_param76 int , +simpleRNNMaxtrixVectorsDouble_param77 decimal(10,5), +simpleRNNMaxtrixVectorsDouble_param78 decimal(10,5), +simpleRNNMaxtrixVectors_param79 int , +simpleRNNMaxtrixVectors_param80 int , +simpleRNNMaxtrixVectors_param81 int , +simpleRNNMaxtrixVectors_param82 int , +simpleRNNMaxtrixVectors_param83 int , +simpleRNNMaxtrixVectors_param84 int , +simpleRNNMaxtrixVectors_param85 int , +simpleRNNMaxtrixVectors_param86 int , +simpleRNNMatrixCalculations_param1 int , +simpleRNNMatrixCalculations_param2 int , +simpleRNNMatrixCalculations_param3 int , +simpleRNNMatrixCalculations_param4 int , +simpleRNNMatrixCalculations_param5 int , +simpleRNNMatrixCalculations_param6 int , +simpleRNNMatrixCalculations_param7 int , +simpleRNNMatrixCalculations_param8 int , +simpleRNNMatrixCalculations_param9 int , +simpleRNNMatrixCalculations_param10 int , +simpleRNNMatrixCalculations_param11 int , +simpleRNNMatrixCalculations_param12 int , +simpleRNNMatrixCalculations_param13 int , +simpleRNNMatrixCalculations_param14 int , +simpleRNNMatrixCalculations_param15 int , +simpleRNNMatrixCalculations_param16 int , +simpleRNNMatrixCalculations_param17 int , +simpleRNNMatrixCalculationsDouble_param18 decimal(10,5), +simpleRNNMatrixCalculationsDouble_param19 decimal(10,5), +simpleRNNMatrixCalculationsDouble_param20 decimal(10,5), +simpleRNNMatrixCalculationsDouble_param21 decimal(10,5), +simpleRNNMatrixCalculationsDouble_param22 decimal(10,5), +simpleRNNMatrixCalculationsDouble_param23 decimal(10,5), +simpleRNNMatrixCalculations_param24 int , +simpleRNNMatrixCalculations_param25 int , +simpleRNNMatrixCalculations_param26 int , +simpleRNNMatrixCalculations_param27 int , +simpleRNNMatrixCalculationsDouble_param28 decimal(10,5), 
+simpleRNNMatrixCalculations_param29 int , +simpleRNNMatrixCalculationsDouble_param30 decimal(10,5), +simpleRNNMatrixCalculationsDouble_param31 decimal(10,5), +simpleRNNMatrixCalculationsDouble_param32 decimal(10,5), +simpleRNNMatrixCalculationsDouble_param33 decimal(10,5), +simpleRNNMatrixCalculationsDouble_param34 decimal(10,5), +simpleRNNMatrixCalculationsDouble_param35 decimal(10,5), +simpleRNNMatrixCalculationsDouble_param36 decimal(10,5), +simpleRNNMatrixCalculations_param37 int , +simpleRNNMatrixCalculations_param38 int , +simpleRNNMatrixCalculations_param39 int , +simpleRNNMatrixCalculations_param40 int , +simpleRNNMatrixCalculations_param41 int , +simpleRNNMatrixCalculations_param42 int , +simpleRNNMatrixCalculations_param43 int , +simpleRNNMatrixCalculations_param44 int , +simpleRNNMatrixCalculations_param45 int , +simpleRNNMatrixCalculations_param46 int , +simpleRNNMatrixCalculationsDouble_param47 decimal(10,5), +simpleRNNMatrixCalculations_param48 int , +simpleRNNMatrixCalculationsDouble_param49 decimal(10,5), +simpleRNNMatrixCalculations_param50 int , +simpleRNNMatrixCalculations_param51 int , +simpleRNNMatrixCalculationsDouble_param52 decimal(10,5), +simpleRNNMatrixCalculations_param53 int , +simpleRNNMatrixCalculations_param54 int , +simpleRNNMatrixCalculationsDouble_param55 decimal(10,5), +simpleRNNMatrixCalculationsDouble_param56 decimal(10,5), +simpleRNNMatrixCalculations_param57 int , +simpleRNNMatrixCalculations_param58 int , +simpleRNNMatrixCalculations_param59 int , +simpleRNNMatrixCalculations_param60 int , +simpleRNNMatrixCalculations_param61 int , +simpleRNNMatrixCalculations_param62 int , +simpleRNNMatrixCalculations_param63 int , +simpleRNNMatrixCalculations_param64 int , +simpleRNNMatrixCalculations_param65 int , +simpleRNNMatrixCalculations_param66 int , +simpleRNNMatrixCalculations_param67 int , +simpleRNNMatrixCalculations_param68 int , +simpleRNNMatrixCalculations_param69 int , +simpleRNNMatrixCalculations_param70 int , +simpleRNNMatrixCalculations_param71 int , +simpleRNNMatrixCalculations_param72 int , +simpleRNNMatrixCalculations_param73 int , +simpleRNNMatrixCalculations_param74 int , +typeDependenciesGrammaticalRelation_param1 int , +typeDependenciesGrammaticalRelation_param2 int , +typeDependenciesGrammaticalRelation_param3 int , +typeDependenciesGrammaticalRelation_param4 int , +typeDependenciesGrammaticalRelation_param5 int , +typeDependenciesGrammaticalRelation_param6 int , +typeDependenciesGrammaticalRelation_param7 int , +typeDependenciesGrammaticalRelation_param8 int , +typeDependenciesGrammaticalRelation_param9 int , +typeDependenciesGrammaticalRelation_param10 int , +typeDependenciesGrammaticalRelation_param11 int , +typeDependenciesGrammaticalRelation_param12 int , +typeDependenciesGrammaticalRelationDouble_param13 decimal(10,5), +typeDependenciesGrammaticalRelation_param14 int , +typeDependenciesGrammaticalRelation_param15 int , +typeDependenciesGrammaticalRelation_param16 int , +typeDependenciesGrammaticalRelation_param17 int , +typeDependenciesGrammaticalRelation_param18 int , +typeDependenciesGrammaticalRelation_param19 int , +typeDependenciesGrammaticalRelation_param20 int , +typeDependenciesGrammaticalRelation_param21 int , +typeDependenciesGrammaticalRelation_param22 int , +typeDependenciesGrammaticalRelation_param23 int , +typeDependenciesGrammaticalRelation_param24 int , +typeDependenciesGrammaticalRelation_param25 int , +typeDependenciesGrammaticalRelation_param26 int , +typeDependenciesGrammaticalRelation_param27 int , 
+typeDependenciesGrammaticalRelation_param28 int , +typeDependenciesGrammaticalRelation_param29 int , +typeDependenciesGrammaticalRelation_param30 int , +typeDependenciesGrammaticalRelation_param31 int , +typeDependenciesGrammaticalRelation_param32 int , +typeDependenciesGrammaticalRelation_param33 int , +typeDependenciesGrammaticalRelation_param34 int , +typeDependenciesGrammaticalRelation_param35 int , +typeDependenciesGrammaticalRelation_param36 int , +typeDependenciesGrammaticalRelation_param37 int , +typeDependenciesGrammaticalRelation_param38 int , +typeDependenciesGrammaticalRelation_param39 int , +typeDependenciesGrammaticalRelation_param40 int , +typeDependenciesGrammaticalRelation_param41 int , +typeDependenciesGrammaticalRelation_param42 int , +typeDependenciesGrammaticalRelation_param43 int , +typeDependenciesGrammaticalRelation_param44 int , +typeDependenciesGrammaticalRelation_param45 int , +typeDependenciesGrammaticalRelation_param46 int , +typeDependenciesGrammaticalRelation_param47 int , +typeDependenciesGrammaticalRelation_param48 int , +typeDependenciesGrammaticalRelation_param49 int , +typeDependenciesGrammaticalRelation_param50 int , +typeDependenciesGrammaticalRelation_param51 int , +typeDependenciesGrammaticalRelation_param52 int , +typeDependenciesGrammaticalRelation_param53 int , +typeDependenciesGrammaticalRelation_param54 int , +typeDependenciesGrammaticalRelation_param55 int , +typeDependenciesGrammaticalRelation_param56 int , +typeDependenciesGrammaticalRelation_param57 int , +typeDependenciesGrammaticalRelation_param58 int , +typeDependenciesGrammaticalRelation_param59 int , +iterateTrees_param1 int , +iterateTrees_param2 int , +iterateTrees_param3 int , +iterateTrees_param4 int , +iterateTrees_param5 int , +iterateTrees_param6 int , +iterateTrees_param7 int , +iterateTrees_param8 int , +iterateTrees_param9 int , +iterateTrees_param10 int , +iterateTreesDouble_param11 decimal(10,5), +iterateTrees_param12 int , +iterateTrees_param13 int , +iterateTrees_param14 int , +iterateTrees_param15 int , +iterateTrees_param16 int , +iterateTrees_param17 int , +iterateTrees_param18 int , +iterateTrees_param19 int , +iterateTreesDouble_param20 decimal(10,5), +iterateTrees_param21 int , +iterateTrees_param22 int , +iterateTrees_param23 int , +iterateTrees_param24 int , +iterateTrees_param25 int , +iterateTrees_param26 int , +iterateTrees_param27 int , +iterateTrees_param28 int , +iterateTrees_param29 int , +iterateTrees_param30 int , +iterateTrees_param31 int , +tgwListScoreIncrementer_param1 int , +overValue_param1 int , +created_on timestamp default now(), +colission_count int , +passed_all_test_cases tinyint(1) DEFAULT NULL +) diff --git a/create_table_sentence_testing.sql b/create_table_sentence_testing.sql new file mode 100644 index 0000000..b3134f9 --- /dev/null +++ b/create_table_sentence_testing.sql @@ -0,0 +1,9 @@ +CREATE TABLE `sentence_testing` ( + `String1` varchar(255) NOT NULL, + `String2` varchar(255) NOT NULL, + `score_required` int(11) DEFAULT 0, + `comperator_for_score` varchar(255) NOT NULL, + `PerformTestingFittingLess` tinyint(1) DEFAULT 0, + `rowid` int(11) NOT NULL AUTO_INCREMENT, + PRIMARY KEY (`rowid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; diff --git a/discord_autismbot.service b/discord_autismbot.service new file mode 100644 index 0000000..52dbe74 --- /dev/null +++ b/discord_autismbot.service @@ -0,0 +1,16 @@ +[Unit] +Description=autismbot Discord + +[Service] +User=nonroot +Group=nonroot 
+WorkingDirectory=/home/nonroot/autism_bot +Environment=PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/lib/jvm/jdk-17/bin +ExecStart=/usr/lib/jvm/jdk-17/bin/java -Xmx4900M -jar /home/nonroot/autism_bot/ArtificialAutism-1.0.jar +Restart=on-failure +RestartSec=10 + +#Restart=always + +[Install] +WantedBy=default.target diff --git a/dynamicScore.json b/dynamicScore.json new file mode 100644 index 0000000..b5345fa --- /dev/null +++ b/dynamicScore.json @@ -0,0 +1,485 @@ +{ + "stopwordTokenPairCounterScoring_param1": 5, + "stopwordTokenPairCounterScoring_param2": 550, + "stopwordTokenPairCounterScoring_param3" : 5, + "stopwordTokenPairCounterScoring_param4" : 7500, + "stopwordTokenPairCounterScoring_param5" : 6, + "stopwordTokenPairCounterScoring_param6" : 550, + "stopwordTokenPairCounterScoring_param7" : 3, + "stopwordTokenPairCounterScoring_param8" : 6, + "stopwordTokenPairCounterScoring_param9" : 3500, + "stopwordTokenPairCounterScoring_param10" : 3, + "stopwordTokenPairCounterScoring_param11" : 2513, + "stopwordTokenPairCounterScoring_param12": 0, + "stopwordTokenPairCounterScoring_param13": 1, + "stopwordTokenPairCounterScoring_param14": 2550, + "stopwordTokenPairCounterScoring_param15": 5, + "stopwordTokenPairCounterScoring_param16": 1500, + "stopwordTokenPairCounterScoring_param17": 5, + "stopwordTokenPairCounterScoring_param18": 450, + "stopwordTokenPairCounterScoring_param19": 2, + "stopwordTokenPairCounterScoring_param20": 4, + "stopwordTokenPairCounterScoring_param21": 2500, + "stopwordTokenPairCounterScoring_param22": 3, + "stopwordTokenPairCounterScoring_param23": 3200, + + "stopWordTokenLemmaScoring_param1": 5, + "stopWordTokenLemmaScoring_param2": 9300, + "stopWordTokenLemmaScoring_param3": 3, + "stopWordTokenLemmaScoring_param4": 2, + "stopWordTokenLemmaScoring_param5": 7400, + "stopWordTokenLemmaScoring_param6": 1, + "stopWordTokenLemmaScoring_param7": 4100, + "stopWordTokenLemmaScoring_param8": 3, + "stopWordTokenLemmaScoring_param9": 540, + "stopWordTokenLemmaScoring_param10": 1, + "stopWordTokenLemmaScoring_param11": 0, + "stopWordTokenLemmaScoring_param12": 8500, + "stopWordTokenLemmaScoring_param13": 4, + "stopWordTokenLemmaScoring_param14": 8650, + "stopWordTokenLemmaScoring_param15": 2, + "stopWordTokenLemmaScoring_param16": 2500, + "stopWordTokenLemmaScoring_param17": 51, + + "nerEntitiesAndTokenScoring_param1": 2500, + "nerEntitiesAndTokenScoring_param2": 2000, + + "SentenceScoreDiff_param1": 15, + + "tokensCounterScoring_param1": 1, + "tokensCounterScoring_param2": 5, + "tokensCounterScoring_param3": 2, + "tokensCounterScoring_param4": 10, + "tokensCounterScoring_param5": 500, + "tokensCounterScoring_param6": 500, + "tokensCounterScoring_param7": 500, + "tokensCounterScoring_param8": 5, + "tokensCounterScoring_param9": 0, + "tokensCounterScoring_param10": 500, + "tokensCounterScoring_param11": 0, + "tokensCounterScoring_param12": 500, + "tokensCounterScoring_param13": 2000, + "tokensCounterScoring_param14": 35, + "tokensCounterScoring_param15": 34, + "tokensCounterScoring_param16": 0, + "tokensCounterScoring_param17": 1, + "tokensCounterScoring_param18": 500, + "tokensCounterScoring_param19": 3012, + "tokensCounterScoring_param20": 0, + "tokensCounterScoring_param21": 1500, + "tokensCounterScoring_param22": 0, + "tokensCounterScoring_param23": 2, + "tokensCounterScoring_param24": 2, + "tokensCounterScoring_param25": 0, + "tokensCounterScoring_param26": 3065, + "tokensCounterScoring_param27": 5, + + "annotatorCountScoring_param1": 1, + 
"annotatorCountScoring_param2": 2, + "annotatorCountScoring_param3": 700, + "annotatorCountScoring_param4": 5, + "annotatorCountScoring_param5": 400, + + "inflectedCounterScoring_param1": 0, + "inflectedCounterScoring_param2": 650, + "inflectedCounterScoring_param3": 0, + "inflectedCounterScoring_param4": 2, + "inflectedCounterScoring_param5": 5, + "inflectedCounterScoring_param6": 150, + "inflectedCounterScoring_param7": 10, + "inflectedCounterScoring_param8": 5, + "inflectedCounterScoring_param9": 400, + "inflectedCounterScoring_param10": 15, + "inflectedCounterScoring_param11": 4000, + + "tokenStemmingMapScoring_param1": 500, + + "tokenformSizeScoring_param1": 0, + "tokenformSizeScoring_param2": 5, + "tokenformSizeScoring_param3": 1600, + "tokenformSizeScoring_param4": 500, + "tokenformSizeScoring_param5": 0, + "tokenformSizeScoring_param6": 2, + "tokenformSizeScoring_param7": 600, + "tokenformSizeScoring_param8": 4, + "tokenformSizeScoring_param9": 600, + + "strTokenMapTagsScoring_param1": 1450, + "strTokenMapTagsScoring_param2": 0, + "strTokenMapTagsScoring_param3": 2, + "strTokenMapTagsScoring_param4": 700, + "strTokenMapTagsScoring_param5": 600, + "strTokenMapTagsScoring_param6": 700, + + "strTokenEntryScoring_param1": 2, + "strTokenEntryScoring_param2": 2500, + "strTokenEntryScoring_param3": 2, + "strTokenEntryScoring_param4": 450, + "strTokenEntryScoring_param5": 1, + "strTokenEntryScoring_param6": 9450, + "strTokenEntryScoring_param7": 1, + "strTokenEntryScoring_param8": 4080, + "strTokenEntryScoring_param9": 2, + "strTokenEntryScoring_param10": 4560, + "strTokenEntryScoring_param11": 250, + + "strTokensMapScoring_param1": 3880, + "strTokensMapScoring_param2": 2, + "strTokensMapScoring_param3": 700, + "strTokensMapScoring_param4": 600, + "strTokensMapScoring_param5": 0, + "strTokensMapScoring_param6": 700, + + "markedContiniousCounterScoring_param1": 0, + "markedContiniousCounterScoring_param2": 50, + "markedContiniousCounterScoring_param3": 120, + "markedContiniousCounterScoring_param4": 2, + "markedContiniousCounterScoring_param5": 4500, + "markedContiniousCounterScoring_param6": 0, + "markedContiniousCounterScoring_param7": 500, + "markedContiniousCounterScoring_param8": 0, + "markedContiniousCounterScoring_param9": 10, + "markedContiniousCounterScoring_param10": 5, + "markedContiniousCounterScoring_param11": 400, + + "unmarkedPatternCounterScoring_param1": 0, + "unmarkedPatternCounterScoring_param2": 100, + "unmarkedPatternCounterScoring_param3": 2, + "unmarkedPatternCounterScoring_param4": 2500, + "unmarkedPatternCounterScoring_param5": 5, + "unmarkedPatternCounterScoring_param6": 4000, + "unmarkedPatternCounterScoring_param7": 500, + "unmarkedPatternCounterScoring_param8": 3, + + "tokenEntryPosScoring_param1": 1, + "tokenEntryPosScoring_param2": 500, + "tokenEntryPosScoring_param3": 3, + "tokenEntryPosScoring_param4": 700, + + "entryCountsScoring_param1": 20, + "entryCountsScoring_param2": 10, + "entryCountsScoring_param3": 30, + "entryCountsScoring_param4": 46800, + "entryCountsScoring_param5": 0, + "entryCountsScoring_param6": 250, + "entryCountsScoring_param7": 3, + "entryCountsScoring_param8": 930, + + "entryCountsRelation_param1": 20, + "entryCountsRelation_param2": 0, + "entryCountsRelation_param3": 5900, + "entryCountsRelation_param4": 0, + "entryCountsRelation_param5": 5, + "entryCountsRelation_param6": 450, + "entryCountsRelation_param7": 50, + "entryCountsRelation_param8": 180, + "entryCountsRelation_param9": 2, + "entryCountsRelation_param10": 450, + 
"entryCountsRelation_param11": 3, + "entryCountsRelation_param12": 550, + "entryCountsRelation_param13": 10, + "entryCountsRelation_param14": 2, + "entryCountsRelation_param15": 600, + + "sentimentMatrixVariances_param1": 12, + "sentimentMatrixVariances_param2": 16, + "sentimentMatrixVariances_param3": 10000, + "sentimentMatrixVariances_param4": 44, + "sentimentMatrixVariances_param5": 48, + "sentimentMatrixVariances_param6": 7900, + "sentimentMatrixVariances_param7": 5, + "sentimentMatrixVariances_param8": 5500, + "sentimentMatrixVariances_param9": 2, + "sentimentMatrixVariances_param10": 1, + "sentimentMatrixVariances_param11": 45, + "sentimentMatrixVariances_param12": 20, + "sentimentMatrixVariances_param13": 20, + "sentimentMatrixVariances_param14": 45, + "sentimentMatrixVariances_param15": 20, + "sentimentMatrixVariances_param16": 2, + "sentimentMatrixVariances_param17": 20, + "sentimentMatrixVariances_param18": 2, + "sentimentMatrixVariances_param19": 5, + "sentimentMatrixVariances_param20": 20, + "sentimentMatrixVariances_param21": 50, + "sentimentMatrixVariances_param22": 5, + "sentimentMatrixVariances_param23": 24, + "sentimentMatrixVariances_param24": 20, + "sentimentMatrixVariances_param25": 4500, + "sentimentMatrixVariances_param26": 5, + "sentimentMatrixVariances_param27": 19, + "sentimentMatrixVariances_param28": 24, + "sentimentMatrixVariances_param29": 4500, + "sentimentMatrixVariances_param30": 26, + "sentimentMatrixVariances_param31": 35, + "sentimentMatrixVariances_param32": 3900, + "sentimentMatrixVariances_param33": 1, + "sentimentMatrixVariances_param34": 15, + "sentimentMatrixVariances_param35": 25, + "sentimentMatrixVariances_param36": 4900, + "sentimentMatrixVariances_param37": 1900, + + "classifyRawEvaluation_param1": 990, + "classifyRawEvaluation_param2": 2, + "classifyRawEvaluation_param3": 5500, + + "sentiment1_param1": 500, + + "simpleRNNMaxtrixVectors_param1": 10, + "simpleRNNMaxtrixVectors_param2": 500, + "simpleRNNMaxtrixVectors_param3": 500, + "simpleRNNMaxtrixVectors_param4": 100, + "simpleRNNMaxtrixVectors_param5": 100, + "simpleRNNMaxtrixVectorsDouble_param6": 0.1, + "simpleRNNMaxtrixVectorsDouble_param7": 0.050, + "simpleRNNMaxtrixVectors_param8": 250, + "simpleRNNMaxtrixVectorsDouble_param9": 0.44, + "simpleRNNMaxtrixVectorsDouble_param10": 0.60, + "simpleRNNMaxtrixVectors_param11": 2400, + "simpleRNNMaxtrixVectorsDouble_param12": 0.40, + "simpleRNNMaxtrixVectorsDouble_param13": 0.445, + "simpleRNNMaxtrixVectors_param14": 3600, + "simpleRNNMaxtrixVectors_param15": 50, + "simpleRNNMaxtrixVectors_param16": 48, + "simpleRNNMaxtrixVectors_param17": 50, + "simpleRNNMaxtrixVectors_param18": 6900, + "simpleRNNMaxtrixVectors_param19": 30, + "simpleRNNMaxtrixVectors_param20": 135, + "simpleRNNMaxtrixVectors_param21": 100, + "simpleRNNMaxtrixVectors_param22": 105, + "simpleRNNMaxtrixVectors_param23": 255, + "simpleRNNMaxtrixVectors_param24": 150, + "simpleRNNMaxtrixVectors_param25": 90, + "simpleRNNMaxtrixVectors_param26": 95, + "simpleRNNMaxtrixVectors_param27": 7700, + "simpleRNNMaxtrixVectors_param28": 80, + "simpleRNNMaxtrixVectors_param29": 85, + "simpleRNNMaxtrixVectors_param30": 4500, + "simpleRNNMaxtrixVectors_param31": 125, + "simpleRNNMaxtrixVectors_param32": 130, + "simpleRNNMaxtrixVectors_param33": 43, + "simpleRNNMaxtrixVectorsDouble_param34": 0.01, + "simpleRNNMaxtrixVectorsDouble_param35": 0.00, + "simpleRNNMaxtrixVectors_param36": 1300, + "simpleRNNMaxtrixVectorsDouble_param37": 0.1, + "simpleRNNMaxtrixVectorsDouble_param38": 1.0, + 
"simpleRNNMaxtrixVectors_param39": 7935, + "simpleRNNMaxtrixVectors_param40": 1, + "simpleRNNMaxtrixVectors_param41": 10, + "simpleRNNMaxtrixVectors_param42": 585, + "simpleRNNMaxtrixVectors_param43": 2500, + "simpleRNNMaxtrixVectors_param44": 50, + "simpleRNNMaxtrixVectors_param45": 51, + "simpleRNNMaxtrixVectors_param46": 57, + "simpleRNNMaxtrixVectors_param47": 22, + "simpleRNNMaxtrixVectors_param48": 24, + "simpleRNNMaxtrixVectors_param49": 9875, + "simpleRNNMaxtrixVectorsDouble_param50": 0.000, + "simpleRNNMaxtrixVectors_param51": 500, + "simpleRNNMaxtrixVectorsDouble_param52": 0.1, + "simpleRNNMaxtrixVectors_param53": 255, + "simpleRNNMaxtrixVectorsDouble_param54": 0.50, + "simpleRNNMaxtrixVectors_param55": 1200, + "simpleRNNMaxtrixVectorsDouble_param56": 0.050, + "simpleRNNMaxtrixVectorsDouble_param57": 0.10, + "simpleRNNMaxtrixVectors_param58": 350, + "simpleRNNMaxtrixVectorsDouble_param59": 3.0, + "simpleRNNMaxtrixVectorsDouble_param60": 9.25, + "simpleRNNMaxtrixVectors_param61": 1555, + "simpleRNNMaxtrixVectorsDouble_param62": 9.25, + "simpleRNNMaxtrixVectorsDouble_param63": 10.0, + "simpleRNNMaxtrixVectors_param64": 2935, + "simpleRNNMaxtrixVectorsDouble_param65": 1.0, + "simpleRNNMaxtrixVectorsDouble_param66": 3.0, + "simpleRNNMaxtrixVectors_param67": 585, + "simpleRNNMaxtrixVectors_param68": 3, + "simpleRNNMaxtrixVectors_param69": 6555, + "simpleRNNMaxtrixVectorsDouble_param70": 0.01, + "simpleRNNMaxtrixVectorsDouble_param71": 0.00, + "simpleRNNMaxtrixVectors_param72": 3300, + "simpleRNNMaxtrixVectorsDouble_param73": 0.1, + "simpleRNNMaxtrixVectorsDouble_param74": 0.2, + "simpleRNNMaxtrixVectors_param75": 6790, + "simpleRNNMaxtrixVectors_param76": 1025, + "simpleRNNMaxtrixVectorsDouble_param77": 0.050, + "simpleRNNMaxtrixVectorsDouble_param78": 0.10, + "simpleRNNMaxtrixVectors_param79": 750, + "simpleRNNMaxtrixVectors_param80": 1, + "simpleRNNMaxtrixVectors_param81": 10, + "simpleRNNMaxtrixVectors_param82": 380, + "simpleRNNMaxtrixVectors_param83": 3, + "simpleRNNMaxtrixVectors_param84": 7500, + "simpleRNNMaxtrixVectors_param85": 5, + "simpleRNNMaxtrixVectors_param86": 400, + + "simpleRNNMatrixCalculations_param1": 100, + "simpleRNNMatrixCalculations_param2": 50, + "simpleRNNMatrixCalculations_param3": 100, + "simpleRNNMatrixCalculations_param4": 0, + "simpleRNNMatrixCalculations_param5": 25, + "simpleRNNMatrixCalculations_param6": 100, + "simpleRNNMatrixCalculations_param7": 25, + "simpleRNNMatrixCalculations_param8": 25, + "simpleRNNMatrixCalculations_param9": 25, + "simpleRNNMatrixCalculations_param10": 5, + "simpleRNNMatrixCalculations_param11": 0, + "simpleRNNMatrixCalculations_param12": 0, + "simpleRNNMatrixCalculations_param13": 10, + "simpleRNNMatrixCalculations_param14": 55, + "simpleRNNMatrixCalculations_param15": 82, + "simpleRNNMatrixCalculations_param16": 30, + "simpleRNNMatrixCalculations_param17": 60, + "simpleRNNMatrixCalculationsDouble_param18": 0.0, + "simpleRNNMatrixCalculationsDouble_param19": 1.3, + "simpleRNNMatrixCalculationsDouble_param20": 1.9, + "simpleRNNMatrixCalculationsDouble_param21": 1.99, + "simpleRNNMatrixCalculationsDouble_param22": 1.248, + "simpleRNNMatrixCalculationsDouble_param23": 1.238, + "simpleRNNMatrixCalculations_param24": 2500, + "simpleRNNMatrixCalculations_param25": 34, + "simpleRNNMatrixCalculations_param26": 28, + "simpleRNNMatrixCalculations_param27": 7948, + "simpleRNNMatrixCalculationsDouble_param28": 55.5, + "simpleRNNMatrixCalculations_param29": 7530, + "simpleRNNMatrixCalculationsDouble_param30": 0.0, + 
"simpleRNNMatrixCalculationsDouble_param31": 1.6, + "simpleRNNMatrixCalculationsDouble_param32": 1.95, + "simpleRNNMatrixCalculationsDouble_param33": 61.1, + "simpleRNNMatrixCalculationsDouble_param34": 61.9, + "simpleRNNMatrixCalculationsDouble_param35": 37.5, + "simpleRNNMatrixCalculationsDouble_param36": 38.2, + "simpleRNNMatrixCalculations_param37": 4500, + "simpleRNNMatrixCalculations_param38": 4500, + "simpleRNNMatrixCalculations_param39": 550, + "simpleRNNMatrixCalculations_param40": 25, + "simpleRNNMatrixCalculations_param41": 75, + "simpleRNNMatrixCalculations_param42": 250, + "simpleRNNMatrixCalculations_param43": 75, + "simpleRNNMatrixCalculations_param44": 25, + "simpleRNNMatrixCalculations_param45": 23, + "simpleRNNMatrixCalculations_param46": 3500, + "simpleRNNMatrixCalculationsDouble_param47": 75.9, + "simpleRNNMatrixCalculations_param48": 24, + "simpleRNNMatrixCalculationsDouble_param49": 24.9, + "simpleRNNMatrixCalculations_param50": 85, + "simpleRNNMatrixCalculations_param51": 4500, + "simpleRNNMatrixCalculationsDouble_param52": 43.5, + "simpleRNNMatrixCalculations_param53": 50, + "simpleRNNMatrixCalculations_param54": 55, + "simpleRNNMatrixCalculationsDouble_param55": 60.0, + "simpleRNNMatrixCalculationsDouble_param56": 66.5, + "simpleRNNMatrixCalculations_param57": 55, + "simpleRNNMatrixCalculations_param58": 75, + "simpleRNNMatrixCalculations_param59": 90, + "simpleRNNMatrixCalculations_param60": 50, + "simpleRNNMatrixCalculations_param61": 55, + "simpleRNNMatrixCalculations_param62": 2800, + "simpleRNNMatrixCalculations_param63": 25, + "simpleRNNMatrixCalculations_param64": 45, + "simpleRNNMatrixCalculations_param65": 100, + "simpleRNNMatrixCalculations_param66": 45, + "simpleRNNMatrixCalculations_param67": 25, + "simpleRNNMatrixCalculations_param68": 25, + "simpleRNNMatrixCalculations_param69": 1400, + "simpleRNNMatrixCalculations_param70": 0, + "simpleRNNMatrixCalculations_param71": 500, + "simpleRNNMatrixCalculations_param72": 150, + "simpleRNNMatrixCalculations_param73": 12, + "simpleRNNMatrixCalculations_param74": 2501, + + "typeDependenciesGrammaticalRelation_param1": 701, + "typeDependenciesGrammaticalRelation_param2": 528, + "typeDependenciesGrammaticalRelation_param3": 700, + "typeDependenciesGrammaticalRelation_param4":527, + "typeDependenciesGrammaticalRelation_param5": 0, + "typeDependenciesGrammaticalRelation_param6": 4, + "typeDependenciesGrammaticalRelation_param7": 3450, + "typeDependenciesGrammaticalRelation_param8": 5, + "typeDependenciesGrammaticalRelation_param9": 0, + "typeDependenciesGrammaticalRelation_param10": 5, + "typeDependenciesGrammaticalRelation_param11": 1, + "typeDependenciesGrammaticalRelation_param12": 160, + "typeDependenciesGrammaticalRelationDouble_param13": 2.5, + "typeDependenciesGrammaticalRelation_param14": 260, + "typeDependenciesGrammaticalRelation_param15": 5, + "typeDependenciesGrammaticalRelation_param16": 10, + "typeDependenciesGrammaticalRelation_param17": 260, + "typeDependenciesGrammaticalRelation_param18": 600, + "typeDependenciesGrammaticalRelation_param19": 5, + "typeDependenciesGrammaticalRelation_param20": 8, + "typeDependenciesGrammaticalRelation_param21": 10, + "typeDependenciesGrammaticalRelation_param22": 50, + "typeDependenciesGrammaticalRelation_param23": 23435, + "typeDependenciesGrammaticalRelation_param24": 180, + "typeDependenciesGrammaticalRelation_param25": 480, + "typeDependenciesGrammaticalRelation_param26": 10522, + "typeDependenciesGrammaticalRelation_param27": 35, + 
"typeDependenciesGrammaticalRelation_param28": 45, + "typeDependenciesGrammaticalRelation_param29": 4021, + "typeDependenciesGrammaticalRelation_param30": 6, + "typeDependenciesGrammaticalRelation_param31": 9340, + "typeDependenciesGrammaticalRelation_param32": 15, + "typeDependenciesGrammaticalRelation_param33": 10, + "typeDependenciesGrammaticalRelation_param34": 2, + "typeDependenciesGrammaticalRelation_param35": 80, + "typeDependenciesGrammaticalRelation_param36": 2502, + "typeDependenciesGrammaticalRelation_param37": 0, + "typeDependenciesGrammaticalRelation_param38": 4101, + "typeDependenciesGrammaticalRelation_param39": 5, + "typeDependenciesGrammaticalRelation_param40": 450, + "typeDependenciesGrammaticalRelation_param41": 450, + "typeDependenciesGrammaticalRelation_param42": 1, + "typeDependenciesGrammaticalRelation_param43": 3, + "typeDependenciesGrammaticalRelation_param44": 1500, + "typeDependenciesGrammaticalRelation_param45": 5, + "typeDependenciesGrammaticalRelation_param46": 500, + "typeDependenciesGrammaticalRelation_param47": 1, + "typeDependenciesGrammaticalRelation_param48": 3431, + "typeDependenciesGrammaticalRelation_param49": 0, + "typeDependenciesGrammaticalRelation_param50": 3, + "typeDependenciesGrammaticalRelation_param51": 4500, + "typeDependenciesGrammaticalRelation_param52": 5, + "typeDependenciesGrammaticalRelation_param53": 500, + "typeDependenciesGrammaticalRelation_param54": 6500, + "typeDependenciesGrammaticalRelation_param55": 250, + "typeDependenciesGrammaticalRelation_param56": 3, + "typeDependenciesGrammaticalRelation_param57": 160, + "typeDependenciesGrammaticalRelation_param58": 3, + "typeDependenciesGrammaticalRelation_param59": 160, + + "iterateTrees_param1": 4, + "iterateTrees_param2": 5, + "iterateTrees_param3": 0, + "iterateTrees_param4": 200, + "iterateTrees_param5": 0, + "iterateTrees_param6": 4, + "iterateTrees_param7": 500, + "iterateTrees_param8": 2, + "iterateTrees_param9": 350, + "iterateTrees_param10": 2, + "iterateTreesDouble_param11": 1.5, + "iterateTrees_param12": 3, + "iterateTrees_param13": 8745, + "iterateTrees_param14": 5, + "iterateTrees_param15": 1550, + "iterateTrees_param16": 3, + "iterateTrees_param17": 3949, + "iterateTrees_param18": 550, + "iterateTrees_param19": 800, + "iterateTreesDouble_param20": 1.5, + "iterateTrees_param21": 350, + "iterateTrees_param22": 2826, + "iterateTrees_param23": 5, + "iterateTrees_param24": 1923, + "iterateTrees_param25": 2, + "iterateTrees_param26": 12985, + "iterateTrees_param27": 1, + "iterateTrees_param28": 2803, + "iterateTrees_param29": 553, + "iterateTrees_param30": 10, + "iterateTrees_param31": 400, + + "tgwListScoreIncrementer_param1": 64, + + "overValue_param1": 32 +} \ No newline at end of file diff --git a/insert_table_sentence_testing.sql b/insert_table_sentence_testing.sql new file mode 100644 index 0000000..8e603e5 --- /dev/null +++ b/insert_table_sentence_testing.sql @@ -0,0 +1,136 @@ + +INSERT INTO ArtificialAutism.sentence_testing +(String1, String2, score_required, comperator_for_score, PerformTestingFittingLess) +VALUES +('I was thinking to small supplies to avoid waiting in the rain. 
This way, in case of trouble you go home and take in your supply instead of waiting 45 min', +'*NêkØ* Kroaat_West bG <3', 800, '<', false), +('u want head from me :wlenny:', 'no thanks but i know some ladyboys here that would', 2000, '>', false), +('we need a trim for kroaat\'s teamwin', 'no thanks but i know some ladyboys here that would', -1, '<', true), +('i am not a stalker', 'but we ban for bhop hack', -1, 'no operation', false), +('i am not a stalker', 'hey stalker', -1, '>', true), +('what do you think of humanzz', 'did we actually go inside rocket -_-', -1, 'no operation', false), +('what do you think of humanzz', 'crying for beeing tossed for fire', -1, '>', true), +('what do you think of humanzz', 'crying for beeing tossed for fire', 3000, '>', false), +('admin ! this map needs a Free Torchlight for all', '( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ?', -5000, '<', false), +('i said that because i indeed have more knowledge about medicines than the average joe', '( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ?', -1, 'no operation', false), +('i said that because i indeed have more knowledge about medicines than the average joe', 'Depends on the situation but i will mostly trust my doctor if he says this will help and i actually need it', -1, '>', true), +('tell me something', 'you learn fast yoshmi', -1, 'no operation', false), +('tell me something', 'when i see europeans dead i laugh', -1, '>', true), +('crobat im gonna nominate next event for you', 'why did we sploit . <:wlenny:514861023002624001> <:wlenny:514861023002624001> <:wlenny:514861023002624001>', -1, 'no operation', false), +('crobat im gonna nominate next event for you', 'lets go for mako', -1, '>', true), +('how are the calcluations going? any issue with the JMWE?', 'anyone know if upgrading damage increases the mines\' damage also', -1, 'no operation', false), +('how are the calcluations going? any issue with the JMWE?', 'i have to get back to work', -1, '>', true), +('sprichst du Deutsch?', 'like rpggift unknown !!! 130', -2500, '<', false), +('do you like memes?', 'we need to adapt to the team we have', 3400, '<', false), +('do you like memes?', 'i have to get back to work', 14400, '>', false), +('is that a cursed sentence?', 'just didnt want to have heal since i died', -200, '<', false), +('my name is ? ? ? ? G ? ? ? but this server doesn\'t read my special ? ? ? ? ? ? characters', 'dont say that sentence again', -5000, '<', false), +('my name is ? ? ? ? G ? ? ? but this server doesn\'t read my special ? ? ? ? ? ? characters', 'please dont tell me your gonna repeat that', -5000, '<', false), +('my name is ? ? ? ? G ? ? ? but this server doesn\'t read my special ? ? ? ? ? ? characters', 'na it was a good try', -5000, '<', false), +('my name is ? ? ? ? G ? ? ? but this server doesn\'t read my special ? ? ? ? ? ? characters', 'NATSU DIES IN THE END', -5000, '<', false), +('reeee', '??( ? :wlenny~1: ?? ? :wlenny~1: )?? ( ? :wlenny~1: ?? ? :wlenny~1: )/ [ :wlenny~1: ?~ :wlenny~1: :] ? :wlenny~1: ?? ?? <', -2500, '<', false), +('dw, my mom is a stupid cunt, she deserved it', '(????????????-)---….. 
JOINT :wlenny~1: !', -2500, '<', false), +('are you a paste cut or a cut and paste?', '? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?', -2500, '<', false), +('Did you know that Denmark\'s short form (DK) is pronounced as "decay"? :thonk~1:', '?( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> )??( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> )??( ? <:wlenny:514861023002624001> ?? ?<:wlenny:514861023002624001>)??( ?<:wlenny:514861023002624001>?? ? <:w', -2500, '<', false), +('are you a space cat or a cat in space? <:thonk:382012909942734858>', '{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)', -2500, '<', false), +('something else to tell me now', '{fullred}(--)? ?(--? )?{mediumblue}?(--)? ?(--)?{magenta}?(--)?{indigo}?(--? )?', -2500, '<', false), +('do you have repeating sentences', 'its pretty cheap with 10 ppl you pay about 60 euro for a week', 1500, '<', false), +('what is 60 euro a week', '{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)', -2500, '<', false), +('do you watch news and if yes which one', '? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?', -2500, '<', false), +('"im gonna bad manner you', '? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?', -2500, '<', false), +('LOOK OUT BIG DOG', '? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?', -2500, '<', false), +('3 days = 30 cents', '? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?', -2500, '<', false), +(':D we had a lot of fun for 2 rounds :D', '? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?', -2500, '<', false), +('>FUCK I JUST PRESSED MY ZTELE BIND', '? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?', -2500, '<', false), +('400 solos on mako <:wlenny:514861023002624001>', '? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?', -2500, '<', false), +('2 noobs 3 not bad', '{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)', -2500, '<', false), +('??????? NOW WE RIOT ???????', '{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)', -2500, '<', false), +('admin turn on bhop pleas', '{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)', -2500, '<', false), +('paranoid is never event', '{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)', -2500, '<', false), +('players keep diying LLLLOOOOLLL', '{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)', -2500, '<', false), +('PRESS THE FUCKING BUTTON IN THE ELEVATOR', '{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? 
<:wlenny:514861023002624001> ?)', -2500, '<', false), +('but instead of minecraft server i got css ze', 'Rocklee when did you come back from the isis khalifate <:wlenny:514861023002624001>', 2500, '<', false), +('First time there\'s that many CT at this point', 'Rocklee when did you come back from the isis khalifate <:wlenny:514861023002624001>', 2500, '<', false), +('go to spec so changemap', 'Rocklee when did you come back from the isis khalifate <:wlenny:514861023002624001>', -2500, '<', false), +('What\'s for lunch?', 'what does bm stand for', -1, 'no operation', false), +('What\'s for lunch?', '2 eggs and 1 cup', -1, '>', true), +('do you watch any series or animes or cartoons', 'you guys voted for this', 1500, '<', false), +('do you know pyrono', 'i have to get accustomed to it', 3000, '<', false), +('Is William a good admin?', 'but this is helms deep', -1, 'no operation', false), +('Is William a good admin?', 'keep an eye on them', -1, '>', true), +('scuffed jenz', 'I HAVE WATCHED ONLY ONE CARTOON AND IT\'S POKEMON', -2500, '<', false), +('So what?', 'I HAVE WATCHED ONLY ONE CARTOON AND IT\'S POKEMON', 3500, '<', false), +('Who is the enemy?', 'I HAVE WATCHED ONLY ONE CARTOON AND IT\'S POKEMON', 1500, '<', false), +('Sounds bad, doesn\'t work', 'that hold is identical to the other room', 500, '<', false), +('oh wait, because I don\'t have any', 'would be cool if VIPs would nominate other than paranoid. All the maps in the vote all the time suck so people just vote for an', 1500, '<', false), +('{navy}? :wlenny~1: ?? {violet}? :wlenny~1: ? :wlenny~1: ? :wlenny~1: ?? ? :wlenny~1: ? :wlenny~1: ? :wlenny~1: ??', 'will you still be online tommorow?', -4500, '<', false), +('stop being such a kid and act more polite towards people ', 'i played nemesis on paradise a few days ago and it worked fine', 3500, '<', false), +('Enfin. Map noob', 'dagger dagger', -400, '<', false), +('u have to hit the middle one with ur nade', 'your not going to mcdonalds, you have had your chance with the cheeseburger', 400, '<', false), +('How is everyone doing', 'wieso ist dein Bein am Arsch. Eigdl hängt das runter', 400, '<', false), +('How is everyone doing', 'meshlem how does it feel to be russian', 700, '<', false), +('they dont buy kefvlar', 'you have a database available again', 400, '<', false), +('because of lag?', 'french tourit', -1400, '<', false), +('because of lag?', 'Even when I\'m working', 3500, '<', false), +('or need another restart', 'Even when I\'m working', 2600, '>', false), +('or need another restart', 'french tourit', -1400, '<', false), +('wow that clock works/', 'didnt the bot like mako? what happened to that?', 400, '<', false), +('haHAA stop that cringe chat haHAA', 'didnt the bot like mako? what happened to that?', 3400, '>', false), +('like 1s down now i guess i will die', 'monkaGIGA', 4400, '>', false), +('what do you want', 'admin extend', 5100, '>', false), +('You are a one large bug', 'omg you are right', 5900, '>', false), +('I\'m not a mapper, wtf', 'this map was made by wtf', 3400, '>', false), +('do you have plants thonk', 'banana trees are plants', 3400, '>', false), +('do you have plants thonk', 'fucking alcolo', 100, '<', false), +('do you have plants thonk', 'qual arma e 382012909942734858', -1400, '<', false), +('do you have plants thonk', 'wlenny on gamebanana', 2500, '>', false), +('And how was it? :wlenny~1:', 'at lvl 1 avad is 140 cd', 400, '>', false), +('wtf? :monkaS~2:', 'think thats it kangaroo next', -400, '<', false), +('yurope', '?? ??????? ??? 
??', -2400, '<', false), +('fuck', '?? ??????? ??? ??', -2400, '<', false), +('you have your priorities straight. i respect that', 'I see the bot doesn\'t want to die he knows depressive ass bot autism fasz eater Amtssprache ist Deutsch :error: &259041609495216129> speak hungarian garantua is the best map', -7400, '<', false), +('Hey that\'s racist', 'I love you autismo', 3400, '>', false), +('Guten Morgen, Herr Autismus, kannst du deine Fresse für ein Paar Minuten halten? :moon2SH:', 'NM_GunPoint: :NM_GunPoint: :NM_GunPoint: :NM_GunPoint: "Nesstsuki Szeretem az édes pinat Ness szeretem az edes pinat Ness send lenny"', -4400, '<', false), +(' I LOVE YOU PLEASE TAKE ME BACK I AM SO SORRY IT WAS A DRUNK ACCIDENT', 'i am not exactly sure how you think renaming them to !autism will cause them to loop over and over again', -2400, '<', false), +('Now look', 'High foot', -2400, '<', false), +('please wake up again', 'kind ghost pinát nyal', -2400, '<', false), +('?mute autism 50', 'have a?', -2400, '<', false), +('no chloroform pls', 'propper_model myb?', 400, '>', false), +('yeah so stop inflating, you make these ppl do that', 'I SAID GOOD NIGHT', -1, 'no operation', false), +('yeah so stop inflating, you make these ppl do that', 'i am inflating and dont make these ppl do that', -1, '>', true), +('yeah so stop inflating, you make these ppl do that', 'i am inflating and dont make these ppl do that', 500, '>', false), +('Maybe that\'s why we have many people who prefer download something from torrents than buying something even it\'s cheap', 'yeah stop inflating, you make people do that', 3400, '>', false), +('Artificial Autism Generation 3 (STEAM_0:0:610560766) Pressed button: Deathrun_Button', 'with big ass boss', -2400, '<', false), +('Artificial Autism Generation 3 Pressed button: Deathrun_Button', 'No ? I SAID GOOD NIGHT', 3400, '>', false), +('Explain?', 'No ? I SAID GOOD NIGHT', 5400, '>', false), +('Dolly stop scamming me', 'Oh hello autims', -2400, '>', false), +('Most importantly, I\'m a cat belonging to himself :cooltom:', 'It already has random bomb sites and random spawns. So it\'s gameplay ready, but the walls, man.:DDDD', -400, '<', false), +('you are his woman? ', 'are you dead again', 4400, '>', false), +('I\'m not your man', 'Yep. semodel. Doesn\'t work. Plugin for BLender is installed and working. Characters are very well exported.', -2400, '<', false), +('shut up', 'Only champagne', 400, '>', false), +('right well kill yourself first of all :woa~1:', 'What is a command prompt: A command prompt allow you to execute .exe files through commands heres a sneak peek : "we are nothing more to women than an amusement park ride with life insurance"', -2400, '<', false), +('A', 'I have neither', -2400, '<', false), +('maybe i tomorow or sunday have an update ready', 'How dare you calling me baguette woman? 
I am not lisa and moreover not french:angerynut~1:', -2400, '<', false), +('no shut up, why the fuck did you just roast me nigger', 'with 0, they cant break it by shot or knife but it still makes a sound of breaking possible i think', -2400, '<', false), +('is bot alive again?', 'tired of spinning in circles?', 1400, '>', false), +('is bot alive again?', 'yes it is', 6400, '>', false), +(':pepelaugh~2:', 'geci cuki autismo bot pinát nyal eating nice dogs Du hast nichteinmal eine Persönlichkeit', -3400, '<', false), +('spits bót ', 'i been blessed by only meeting nice dogs', -400, '<', false), +('spits bót ', 'spits faggot', 5400, '>', false), +('bro fuck you', 'welp i dont have time any more for focusing on the bot so its just a simple restart', 2400, '>', false), +('nah it\'s random, it can react to long messages pretty quickly sometiems', 'i mean there certainly exists models that i could apply for french/german/spanish/chinese/hungarian gay is when you are a guy but have an autistic 2D girl on your profile picture - now this is gay', -400, '<', false), +('you don\'t have a', 'Yep. semodel. Doesn\'t work. Plugin for BLender is installed and working. Characters are very well exported.', -2400, '<', false), +('finally bot is admin ready', 'Only champagne', 2400, '<', false), +('Helmstronk stop posting cringe, last warning', 'finally admin ready', 4400, '>', false), +('1pm for me', 'Helmstronk stop posting cringe, last warning', -2400, '<', false), +('it\'s 2 PM dude', 'Nesstsuki Szeretem az édes pinat', -2400, '<', false), +('can i have b?', 'Dark Silent speak', -2400, '<', false), +('autismo loves to eat pussy?', 'I have neither', 2400, '>', false), +('autismo loves to eat pussy?', 'I have sweeto autismo, wake up', 5400, '>', false), +('Good afternoon acousticheart01815 and have A', 'master degree in artificial autism carpeting I love to eat pussy 🤣 🤣 🤣 🤣 🤣 🤣', -2400, '<', false), +('i certainly planned only to spend time on it one weekend', 'i am not exactly sure how you think renaming them to !autism will cause them to loop over and over again', -2400, '<', false), +('Who', 'i have neither', -2400, '<', false), +('feels bad i only have like one hour free time per day for working on the update on you', 'in func_breakable there setting "strength" and in help comment "Number of points of damage to take before breaking. 0 means don\'t break."', -2400, '<', false), +('that sentence is dumb as fuck stop using it', 'ΣZΣ | jenz you see any bots on here??? 404Trucy look your new friend 1pm for me :loved: :shinogeci: :sbpl: you alive? does shino like fasz? 
boost the server pls', -4400, '<', false),
+('Ye, i caught ya bitch!', 'I hope lask overtakes gaulloise', -2400, '<', false),
+('Ye, i caught ya bitch!', 'Good for you bitch', 15400, '>', false)
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..a028b8e
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,226 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>com.mycompany</groupId>
+    <artifactId>ArtificialAutism</artifactId>
+    <version>1.0</version>
+    <packaging>jar</packaging>
+    <dependencies>
+        <dependency>
+            <groupId>com.googlecode.json-simple</groupId>
+            <artifactId>json-simple</artifactId>
+            <version>1.1.1</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-dbcp2</artifactId>
+            <version>2.5.0</version>
+            <type>jar</type>
+        </dependency>
+        <dependency>
+            <groupId>net.dv8tion</groupId>
+            <artifactId>JDA</artifactId>
+            <version>5.0.0-beta.21</version>
+        </dependency>
+        <dependency>
+            <groupId>org.junit-pioneer</groupId>
+            <artifactId>junit-pioneer</artifactId>
+            <version>2.2.0</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+            <version>8.0.13</version>
+        </dependency>
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+            <version>26.0-jre</version>
+        </dependency>
+        <dependency>
+            <groupId>com.github.mpkorstanje</groupId>
+            <artifactId>simmetrics-core</artifactId>
+            <version>4.1.1</version>
+        </dependency>
+        <dependency>
+            <groupId>ws4j</groupId>
+            <artifactId>ws4j</artifactId>
+            <version>1.0.1</version>
+            <scope>system</scope>
+            <systemPath>${project.basedir}/lib/ws4j-1.0.1.jar</systemPath>
+        </dependency>
+
+        <dependency>
+            <groupId>stanford-corenlp-models-english</groupId>
+            <artifactId>stanford-corenlp-models-english</artifactId>
+            <version>4.3.1</version>
+            <scope>system</scope>
+            <systemPath>${project.basedir}/lib/stanford-corenlp-models-english-4.3.1.jar</systemPath>
+            <type>jar</type>
+        </dependency>
+        <dependency>
+            <groupId>ejml-simple</groupId>
+            <artifactId>ejml-simple</artifactId>
+            <version>0.39</version>
+            <scope>system</scope>
+            <systemPath>${project.basedir}/lib/ejml-simple-0.39.jar</systemPath>
+            <type>jar</type>
+        </dependency>
+        <dependency>
+            <groupId>ejml-core</groupId>
+            <artifactId>ejml-core</artifactId>
+            <version>0.39</version>
+            <scope>system</scope>
+            <systemPath>${project.basedir}/lib/ejml-core-0.39.jar</systemPath>
+            <type>jar</type>
+        </dependency>
+        <dependency>
+            <groupId>ejml-ddense</groupId>
+            <artifactId>ejml-ddense</artifactId>
+            <version>0.39</version>
+            <scope>system</scope>
+            <systemPath>${project.basedir}/lib/ejml-ddense-0.39.jar</systemPath>
+            <type>jar</type>
+        </dependency>
+        <dependency>
+            <groupId>jmweAnno</groupId>
+            <artifactId>jmweAnno</artifactId>
+            <version>1.0</version>
+            <scope>system</scope>
+            <systemPath>${project.basedir}/lib/jmweAnno-1.0.jar</systemPath>
+            <type>jar</type>
+        </dependency>
+        <dependency>
+            <groupId>edu.mit.jmwe</groupId>
+            <artifactId>edu.mit.jmwe</artifactId>
+            <version>1.0.2</version>
+            <scope>system</scope>
+            <systemPath>${project.basedir}/lib/edu.mit.jmwe-1.0.2.jar</systemPath>
+            <type>jar</type>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-analyzers-common</artifactId>
+            <version>7.2.0</version>
+            <type>jar</type>
+        </dependency>
+        <dependency>
+            <groupId>edu.stanford.nlp</groupId>
+            <artifactId>stanford-corenlp</artifactId>
+            <version>4.3.1</version>
+        </dependency>
+        <dependency>
+            <groupId>edu.stanford.nlp</groupId>
+            <artifactId>stanford-corenlp</artifactId>
+            <version>4.3.1</version>
+            <classifier>models</classifier>
+        </dependency>
+        <dependency>
+            <groupId>com.discord4j</groupId>
+            <artifactId>discord4j-core</artifactId>
+            <version>3.2.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.jetbrains.kotlin</groupId>
+            <artifactId>kotlin-stdlib-jdk8</artifactId>
+            <version>${kotlin.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.jetbrains.kotlin</groupId>
+            <artifactId>kotlin-test</artifactId>
+            <version>${kotlin.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.jetbrains.kotlinx</groupId>
+            <artifactId>kotlinx-coroutines-core-jvm</artifactId>
+            <version>1.5.0</version>
+        </dependency>
+    </dependencies>
+
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <maven.compiler.source>1.8</maven.compiler.source>
+        <maven.compiler.target>1.8</maven.compiler.target>
+        <mainClass>PresentationLayer.DiscordHandler</mainClass>
+        <kotlin.version>1.5.20-M1</kotlin.version>
+    </properties>
+
+    <build>
+        <plugins>
+            <plugin>
+                <artifactId>maven-dependency-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <phase>install</phase>
+                        <goals>
+                            <goal>copy-dependencies</goal>
+                        </goals>
+                        <configuration>
+                            <outputDirectory>${project.build.directory}/lib</outputDirectory>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <artifactId>maven-jar-plugin</artifactId>
+                <configuration>
+                    <archive>
+                        <manifest>
+                            <addClasspath>true</addClasspath>
+                            <classpathPrefix>lib/</classpathPrefix>
+                            <mainClass>${mainClass}</mainClass>
+                        </manifest>
+                    </archive>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.jetbrains.kotlin</groupId>
+                <artifactId>kotlin-maven-plugin</artifactId>
+                <version>${kotlin.version}</version>
+                <executions>
+                    <execution>
+                        <id>compile</id>
+                        <phase>compile</phase>
+                        <goals>
+                            <goal>compile</goal>
+                        </goals>
+                    </execution>
+                    <execution>
+                        <id>test-compile</id>
+                        <phase>test-compile</phase>
+                        <goals>
+                            <goal>test-compile</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <configuration>
+                    <jvmTarget>1.8</jvmTarget>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>compile</id>
+                        <phase>compile</phase>
+                        <goals>
+                            <goal>compile</goal>
+                        </goals>
+                    </execution>
+                    <execution>
+                        <id>testCompile</id>
+                        <phase>test-compile</phase>
+                        <goals>
+                            <goal>testCompile</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>
diff --git a/src/main/java/DataLayer/DBCPDataSource.java b/src/main/java/DataLayer/DBCPDataSource.java
new file mode 100644
index 0000000..7f87e1d
--- /dev/null
+++ b/src/main/java/DataLayer/DBCPDataSource.java
@@ -0,0 +1,55 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package DataLayer;
+
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.Properties;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.commons.dbcp2.BasicDataSource;
+import DataLayer.settings;
+
+/**
+ * @author install1
+ */
+public class DBCPDataSource {
+    private static BasicDataSource ds = new BasicDataSource();
+
+    static {
+        try {
+            Properties prop = new Properties();
+            String fileName = "app.config";
+            try (FileInputStream fis = new FileInputStream(fileName)) {
+                prop.load(fis);
+            } catch (FileNotFoundException ex) {
+            } catch (IOException ex) {
+            }
+            ds.setDriver(new com.mysql.cj.jdbc.Driver());
+            ds.setUrl(prop.getProperty("app.url"));
+            ds.setUsername(prop.getProperty("app.username"));
+            ds.setPassword(prop.getProperty("app.password"));
+            ds.setMaxTotal(-1);
+            ds.setMinIdle(5);
+            ds.setMaxIdle(-1);
+            ds.setMaxOpenPreparedStatements(100);
+            System.out.println("called BasicDataSource ");
+        } catch (SQLException ex) {
+            Logger.getLogger(DBCPDataSource.class.getName()).log(Level.SEVERE, null, ex);
+        }
+    }
+
+    public static Connection getConnection() throws SQLException {
+        return ds.getConnection();
+    }
+
+    private DBCPDataSource() {
+    }
+}
diff --git a/src/main/java/DataLayer/DataMapper.java b/src/main/java/DataLayer/DataMapper.java
new file mode 100644
index 0000000..0cd742a
--- /dev/null
+++ b/src/main/java/DataLayer/DataMapper.java
@@ -0,0 +1,660 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package DataLayer;
+
+import org.jetbrains.annotations.NotNull;
+import org.json.simple.JSONObject;
+
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.sql.*;
+import java.util.*;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+/**
+ * @author install1
+ */
+public class DataMapper {
+
+    public static ArrayList<String> getAllStrings() {
+        Connection l_cCon = null;
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+        ArrayList<String> arrayListStr = new ArrayList<>();
+        Properties prop = new Properties();
+        String fileName = "app.config";
+        try (FileInputStream fis = new FileInputStream(fileName)) {
+            prop.load(fis);
+        } catch (FileNotFoundException ex) {
+        } catch (IOException ex) {
+        }
+        try {
+            l_cCon = DBCPDataSource.getConnection();
+            //not better as such, it just forces the responses to vary a lot more
+            String l_sSQL = "SELECT * FROM `Sentences` where last_used < CURRENT_DATE() - INTERVAL "
+                    + String.valueOf(prop.getProperty("app.interval_days"))
+                    + " DAY order by LENGTH(Strings) desc";
+            //System.out.println("l_sSQL: " + l_sSQL);
+            l_pStatement = l_cCon.prepareStatement(l_sSQL);
+            l_rsSearch = l_pStatement.executeQuery();
+            while (l_rsSearch.next()) {
+                arrayListStr.add(l_rsSearch.getString(1));
+            }
+        } catch (SQLException throwables) {
+            throwables.printStackTrace();
+        } finally {
+            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
+        }
+        return arrayListStr;
+    }
+
+    public static void InsertMYSQLStrings(String str) {
+        Connection l_cCon = null;
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+        Properties prop = new Properties();
+        String fileName = "app.config";
+        try (FileInputStream fis = new FileInputStream(fileName)) {
+            prop.load(fis);
+        } catch (FileNotFoundException ex) {
+        } catch (IOException ex) {
+        }
+        String l_sSQL = "INSERT IGNORE `Sentences` (`Strings`, `last_used`) VALUES (?, DATE(NOW()) - interval "
+                + String.valueOf(prop.getProperty("app.interval_days_minus")) + " DAY)";
+        try {
+            l_cCon = DBCPDataSource.getConnection();
+            l_pStatement = l_cCon.prepareStatement(l_sSQL);
+            l_pStatement.setString(1, str);
+
+            l_pStatement.execute();
+        } catch (SQLException throwables) {
+            throwables.printStackTrace();
+        } finally {
+            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
+        }
+    }
+
+
+    public static void CloseConnections(PreparedStatement ps, ResultSet rs, Connection con) {
+        if (rs != null) {
+            try {
+                rs.close();
+            } catch (SQLException ex) {
+                Logger.getLogger(DataMapper.class.getName()).log(Level.SEVERE, null, ex);
+            }
+        }
+        if (ps != null) {
+            try {
+                ps.close();
+            } catch (SQLException ex) {
+                Logger.getLogger(DataMapper.class.getName()).log(Level.SEVERE, null, ex);
+            }
+        }
+        if (con != null) {
+            try {
+                con.close();
+            } catch (SQLException ex) {
+                Logger.getLogger(DataMapper.class.getName()).log(Level.SEVERE, null, ex);
+            }
+        }
+    }
+
+    public static void checkStringsToDelete() {
+        Properties prop = new Properties();
+        String fileName = "app.config";
+        try (FileInputStream fis = new FileInputStream(fileName)) {
+            prop.load(fis);
+        } catch (FileNotFoundException ex) {
+        } catch (IOException ex) {
+        }
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+        String CountSQL = "select count(*) from Sentences";
+        String l_sSQL = "delete from Sentences order by last_used asc limit 5;";
+        try (Connection l_cCon = DBCPDataSource.getConnection()) {
+            l_pStatement = l_cCon.prepareStatement(CountSQL);
+            ResultSet resultSet = l_pStatement.executeQuery();
+            if (resultSet.next()) {
+                int count = resultSet.getInt(1);
+                if (count > Integer.valueOf(prop.getProperty("app.string_count"))) {
+                    //System.out.println("cleaning strings: " + l_sSQL);
+                    l_pStatement = l_cCon.prepareStatement(l_sSQL);
+                    l_pStatement.executeUpdate();
+                }
+            }
+        } catch (SQLException throwables) {
+            throwables.printStackTrace();
+        } finally {
+            CloseConnections(l_pStatement, l_rsSearch, null);
+        }
+    }
+
+    public static void updateLastUsed(@NotNull ArrayList<String> mysqlUpdateLastUsed) {
+        Connection l_cCon = null;
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+        String l_sSQL = "update Sentences Set last_used = now() where Strings = (?)";
+        try {
+            l_cCon = DBCPDataSource.getConnection();
+            l_pStatement = l_cCon.prepareStatement(l_sSQL);
+            for (String str1 : mysqlUpdateLastUsed) {
+                l_pStatement.setString(1, str1);
+                l_pStatement.execute();
+            }
+        } catch (SQLException throwables) {
+            throwables.printStackTrace();
+        } finally {
+            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
+        }
+    }
+
+
+    //from here on it's testing stuff
+
+    public static void WriteDataToSpecificFunction(Set<JSONObject> dataSet, String testCaseFunction) {
+        Connection l_cCon = null;
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+
+        try {
+            //generating the columns to be inserted
+            StringBuilder l_sSQL_columns = new StringBuilder("insert into `" + testCaseFunction + "` (");
+            for (JSONObject data : dataSet) {
+                for (Object key : data.keySet()) {
+                    //only taking parameters that match the current test type.
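+                    //e.g. a key named "<testCaseFunction>_param3" or "<testCaseFunction>Double_param3" becomes an
+                    //insert column here, while keys that belong to a different scoring function are skipped.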
+ if (!key.toString().startsWith(testCaseFunction + "Double_param") && + !key.toString().startsWith(testCaseFunction + "_param")) continue; + l_sSQL_columns.append(key.toString()); + l_sSQL_columns.append(","); + } + break; + } + l_sSQL_columns.setLength(l_sSQL_columns.length() - 1); + l_sSQL_columns.append(", java_pid) VALUES "); + + + l_cCon = DBCPDataSource.getConnection(); + int indexCounter = 0; + StringBuilder l_sSQL = new StringBuilder(" "); + //adding the actual value pairs that have to be inserted. + for (JSONObject data : dataSet) { + if (indexCounter % 250 == 0 && indexCounter > 0) { + System.out.println("insert Traversed " + indexCounter + "/" + dataSet.size()); + l_sSQL.setLength(l_sSQL.length() - 1); //removing last comma + l_pStatement = l_cCon.prepareStatement(l_sSQL_columns.toString() + l_sSQL.toString()); + l_pStatement.execute(); + l_sSQL = new StringBuilder(" "); + } + indexCounter++; + l_sSQL.append(" ("); + + for (Object key : data.keySet()) { + //only taking parameters that match the current test type. + if (!key.toString().startsWith(testCaseFunction + "Double_param") && + !key.toString().startsWith(testCaseFunction + "_param")) continue; + if (key.toString().startsWith("simpleRNNMaxtrixVectorsDouble") || key.toString().startsWith("simpleRNNMatrixCalculationsDouble") + || key.toString().startsWith("typeDependenciesGrammaticalRelationDouble") || key.toString().startsWith("iterateTreesDouble")) { + Double doubleValue1 = (Double) data.get(key); + l_sSQL.append(doubleValue1); + } else { + //System.out.println("key: " + key + " val: " + data.get(key)); + int intValue1 = (int) data.get(key); + l_sSQL.append(intValue1); + } + l_sSQL.append(","); + } + l_sSQL.append(ProcessHandle.current().pid()); + l_sSQL.append("),"); + } + l_sSQL.setLength(l_sSQL.length() - 1); //removing last comma + l_pStatement = l_cCon.prepareStatement(l_sSQL_columns.toString() + l_sSQL.toString()); + l_pStatement.execute(); + System.out.println("finished last insert traversed"); + } catch (SQLException throwables) { + throwables.printStackTrace(); + } finally { + CloseConnections(l_pStatement, l_rsSearch, l_cCon); + } + } + + + + public static List GetAllTestsCases() { + Connection l_cCon = null; + PreparedStatement l_pStatement = null; + ResultSet l_rsSearch = null; + List testClassesList = new ArrayList(); + try { + l_cCon = DBCPDataSource.getConnection(); + String l_sSQL = "select * from `sentence_testing` order by rowid asc"; + l_pStatement = l_cCon.prepareStatement(l_sSQL); + l_rsSearch = l_pStatement.executeQuery(); + while (l_rsSearch.next()) { + testClasses testClass = new testClasses(l_rsSearch.getString(1), l_rsSearch.getString(2), l_rsSearch.getInt(3), l_rsSearch.getString(4), + l_rsSearch.getBoolean(5)); + testClassesList.add(testClass); + } + } catch (SQLException throwables) { + throwables.printStackTrace(); + } finally { + CloseConnections(l_pStatement, l_rsSearch, l_cCon); + } + return testClassesList; + } + + public static void MarkSuccessfullFunctionData(String testCaseFunction, Integer rowid) { + Connection l_cCon = null; + PreparedStatement l_pStatement = null; + ResultSet l_rsSearch = null; + try { + l_cCon = DBCPDataSource.getConnection(); + StringBuilder l_sSQL = new StringBuilder("update `" + testCaseFunction + "` set passed_all_test_cases = true where rowid = " + rowid); + l_pStatement = l_cCon.prepareStatement(l_sSQL.toString()); + l_pStatement.executeUpdate(); + } catch (SQLException throwables) { + throwables.printStackTrace(); + } finally { + 
CloseConnections(l_pStatement, l_rsSearch, l_cCon);
+        }
+    }
+
+    public static Set<JSONObject> SelectRandomDataNotProcessed(String testCaseFunction) {
+        Set<JSONObject> randomDataSet = new HashSet<>();
+        Connection l_cCon = null;
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+        try {
+            l_cCon = DBCPDataSource.getConnection();
+            StringBuilder l_sSQL = new StringBuilder("select * from `" + testCaseFunction + "` where "
+                    + " java_pid = " + ProcessHandle.current().pid());
+            l_pStatement = l_cCon.prepareStatement(l_sSQL.toString());
+            l_rsSearch = l_pStatement.executeQuery();
+            ResultSetMetaData metaData = l_rsSearch.getMetaData();
+            while (l_rsSearch.next()) {
+                JSONObject object = new JSONObject();
+                for (int i = 1; i <= metaData.getColumnCount(); i++) //these indexes start at 1 instead of 0
+                {
+                    if (metaData.getColumnName(i).equals("created_on")) break;
+                    int columnType = metaData.getColumnType(i);
+                    Object Value = null;
+                    if (Types.INTEGER == columnType) {
+                        Value = l_rsSearch.getInt(i);
+                    } else if (Types.DECIMAL == columnType) {
+                        Value = l_rsSearch.getDouble(i);
+                    } else continue;
+                    object.put(metaData.getColumnName(i), Value);
+                }
+                randomDataSet.add(object);
+            }
+        } catch (SQLException throwables) {
+            throwables.printStackTrace();
+        } finally {
+            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
+        }
+        return randomDataSet;
+    }
+
+    public static List<testClasses> GetFunctionTestCases() {
+        Connection l_cCon = null;
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+        List<testClasses> testClassesList = new ArrayList<>();
+        try {
+            l_cCon = DBCPDataSource.getConnection();
+            String l_sSQL = "select * from `sentence_testing_function` order by rowid asc";
+            l_pStatement = l_cCon.prepareStatement(l_sSQL);
+            l_rsSearch = l_pStatement.executeQuery();
+            while (l_rsSearch.next()) {
+                testClasses testClass = new testClasses(l_rsSearch.getString(1), l_rsSearch.getString(2), l_rsSearch.getInt(3), l_rsSearch.getString(4),
+                        false);
+                testClassesList.add(testClass);
+            }
+        } catch (SQLException throwables) {
+            throwables.printStackTrace();
+        } finally {
+            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
+        }
+        return testClassesList;
+    }
+
+    public static void UpdateProcessed(String testCaseFunction, Set<JSONObject> randomDataSet, int max_index_counter_tests_passed, Double bestScore,
+                                       String comperator_for_score_for_failing_testcase) {
+        Connection l_cCon = null;
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+        List<testClasses> testClassesList = new ArrayList<>();
+        boolean maxIndexCounterTestsPassedCount = DataMapper.getMaxIndex_counter_tests_passedCount(max_index_counter_tests_passed, testCaseFunction);
+
+        try {
+            l_cCon = DBCPDataSource.getConnection();
+            for (JSONObject dataRandom : randomDataSet) {
+                Double failed_score = (Double) dataRandom.get("failed_testcase_score");
+                System.out.println("failed_score: " + failed_score + " max_index_counter_tests_passed: " + max_index_counter_tests_passed);
+                //updating rows that reached a better test case
+                if ((int) dataRandom.get("index_counter_tests_passed") > max_index_counter_tests_passed) {
+                    String l_sSQL = "update `" + testCaseFunction + "` set index_counter_tests_passed = " + dataRandom.get("index_counter_tests_passed")
+                            + " , failed_testcase_score = " + dataRandom.get("failed_testcase_score")
+                            + " , java_pid = NULL "
+                            + " where rowid = " + dataRandom.get("rowid");
+                    l_pStatement = l_cCon.prepareStatement(l_sSQL);
+                    l_pStatement.executeUpdate();
+                }
+                //add the result if it's better than the last.
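+                //"better" depends on the comparator registered for the failing test case:
+                //with ">" a higher failed_testcase_score wins, with "<" a lower one does.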
+                else if ((int) dataRandom.get("index_counter_tests_passed") == max_index_counter_tests_passed
+                        && (comperator_for_score_for_failing_testcase.contains(">") ?
+                        failed_score > bestScore :
+                        failed_score < bestScore)) {
+                    String l_sSQL = "update `" + testCaseFunction + "` set index_counter_tests_passed = " + dataRandom.get("index_counter_tests_passed")
+                            + " , failed_testcase_score = " + dataRandom.get("failed_testcase_score")
+                            + " , java_pid = NULL "
+                            + " where rowid = " + dataRandom.get("rowid");
+                    l_pStatement = l_cCon.prepareStatement(l_sSQL);
+                    l_pStatement.executeUpdate();
+                }
+                else //deleting rows that gave a worse result.
+                {
+                    String l_sSQL = "delete from `" + testCaseFunction + "` where rowid = " + dataRandom.get("rowid");
+                    l_pStatement = l_cCon.prepareStatement(l_sSQL);
+                    l_pStatement.executeUpdate();
+                }
+            }
+        } catch (SQLException throwables) {
+            throwables.printStackTrace();
+        } finally {
+            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
+        }
+    }
+
+    public static int get_index_counter_tests_passed(String testCaseFunction, int manualInt)
+    {
+        Connection l_cCon = null;
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+        int max_index_counter_tests_passed = -1;
+        try {
+            l_cCon = DBCPDataSource.getConnection();
+            String l_sSQL = "select COALESCE(max(index_counter_tests_passed), 0) from `" + testCaseFunction + "`";
+            if (manualInt != 0)
+            {
+                l_sSQL += " WHERE index_counter_tests_passed < " + manualInt;
+            }
+            l_pStatement = l_cCon.prepareStatement(l_sSQL);
+            l_rsSearch = l_pStatement.executeQuery();
+            l_rsSearch.next();
+            max_index_counter_tests_passed = l_rsSearch.getInt(1);
+        } catch (SQLException e) {
+            throw new RuntimeException(e);
+        } finally {
+            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
+        }
+        return max_index_counter_tests_passed;
+    }
+
+    public static Set<JSONObject> pickHighestProgression(String testCaseFunction) {
+        Set<JSONObject> randomDataSetToReturn = new HashSet<>();
+        Connection l_cCon = null;
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+        try {
+            l_cCon = DBCPDataSource.getConnection();
+            String CheckIfAnyRows = "select count(*) from `" + testCaseFunction + "`"; //do we have any rows at all?
+            l_pStatement = l_cCon.prepareStatement(CheckIfAnyRows);
+            l_rsSearch = l_pStatement.executeQuery();
+            if (l_rsSearch.next()){
+                int rowcount = l_rsSearch.getInt(1);
+                if (rowcount == 0) {
+                    return randomDataSetToReturn; // we did not generate anything yet for this category, so start with random data instead.
+                }
+            }
+            int limit = 2;
+            int index_counter_tests_passed = 0;
+            int attempts = 0;
+            while (randomDataSetToReturn.size() < limit && attempts < 5)
+            {
+                attempts++;
+                //select here max(index_counter_tests_passed), then pick the comparator from ArtificialAutism.sentence_testing_function
+                //to know if the score has to aim for lowest or highest on the currently failing test case.
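+                //roughly, the statement assembled below has this shape (with comparator "<"):
+                //  select e1.* from `<testCaseFunction>` e1
+                //  inner join (select rowid from `<testCaseFunction>`
+                //              where index_counter_tests_passed = <max> and failed_testcase_score is not null
+                //              group by failed_testcase_score
+                //              order by failed_testcase_score asc) as e2 on e1.rowid = e2.rowid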
+ index_counter_tests_passed = get_index_counter_tests_passed(testCaseFunction, index_counter_tests_passed); + String comperator_for_score_for_failing_testcase = getMaxIndexComparator(index_counter_tests_passed); + + String l_sSQL = "select e1.* from `" + testCaseFunction + "` e1 " + + "inner join " + + "(select rowid " + + "from `" + testCaseFunction + "` " + + "where index_counter_tests_passed = " + index_counter_tests_passed + + " and failed_testcase_score is not null " + + " group by failed_testcase_score " + + "order by failed_testcase_score "; + if (comperator_for_score_for_failing_testcase.contains(">")) { + l_sSQL += "desc"; + } else { + l_sSQL += "asc"; + } + + l_sSQL += " ) as e2 on e1.rowid = e2.rowid"; + + l_pStatement = l_cCon.prepareStatement(l_sSQL); + l_rsSearch = l_pStatement.executeQuery(); + ResultSetMetaData metaData = l_rsSearch.getMetaData(); + + JSONObject previousObject = null; + while (l_rsSearch.next()) { + JSONObject object = new JSONObject(); + for (int i = 1; i <= metaData.getColumnCount(); i++) //these indexes start at 1 instead of 0 + { + //if (metaData.getColumnName(i).equals("created_on")) break; + int columnType = metaData.getColumnType(i); + Object Value = null; + if (Types.INTEGER == columnType) { + Value = l_rsSearch.getInt(i); + } else if (Types.DECIMAL == columnType || Types.DOUBLE == columnType) { + Value = l_rsSearch.getDouble(i); + } else continue; + object.put(metaData.getColumnName(i), Value); + } + if (previousObject == null) + { + randomDataSetToReturn.add(object); + previousObject = (JSONObject) object.clone(); + } + else + { + int minimumDifference = 200; + Double test = (Double) object.get("failed_testcase_score") - (Double) previousObject.get("failed_testcase_score"); + if (test > minimumDifference || test < (minimumDifference * -1)) { + randomDataSetToReturn.add(object); + previousObject = object; + } + } + if (randomDataSetToReturn.size() >= limit) + { + break; + } + } + } + } catch (SQLException throwables) { + throwables.printStackTrace(); + } finally { + CloseConnections(l_pStatement, l_rsSearch, l_cCon); + } + //create here out of it only unique objects still, so not that many repeating rows. 
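+        //stripping failed_testcase_score below leaves only the parameter values themselves; note the
+        //objects are mutated in place after having been added, so the HashSet is not re-deduplicated.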
+        for (JSONObject obj : randomDataSetToReturn){
+            obj.remove("failed_testcase_score");
+        }
+        return randomDataSetToReturn;
+    }
+
+    public static boolean getMaxIndex_counter_tests_passedCount(int max_index_counter_tests_passed, String testCaseFunction){
+        Connection l_cCon = null;
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+        int index_count_rows_counted = -1;
+        try {
+            l_cCon = DBCPDataSource.getConnection();
+            String l_sSQL = "select count(*) from `" + testCaseFunction + "` "
+                    + " where index_counter_tests_passed = " + max_index_counter_tests_passed;
+            l_pStatement = l_cCon.prepareStatement(l_sSQL);
+            l_rsSearch = l_pStatement.executeQuery();
+            l_rsSearch.next();
+            index_count_rows_counted = l_rsSearch.getInt(1);
+        } catch (SQLException e) {
+            throw new RuntimeException(e);
+        } finally {
+            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
+        }
+        return index_count_rows_counted < 1000;
+
+    }
+
+    public static String getMaxIndexComparator(int max_index){
+        Connection l_cCon = null;
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+        String comperator_for_score_for_failing_testcase = "";
+        try {
+            l_cCon = DBCPDataSource.getConnection();
+            String l_sSQL = "select comperator_for_score "
+                    + "from ArtificialAutism.sentence_testing_function stf "
+                    + "order by rowid asc limit 1 OFFSET " + max_index;
+            l_pStatement = l_cCon.prepareStatement(l_sSQL);
+            l_rsSearch = l_pStatement.executeQuery();
+            l_rsSearch.next();
+            comperator_for_score_for_failing_testcase = l_rsSearch.getString(1);
+        } catch (SQLException e) {
+            throw new RuntimeException(e);
+        } finally {
+            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
+        }
+        return comperator_for_score_for_failing_testcase;
+    }
+
+    public static HashMap<Integer, ArrayList<Integer>> get_parameter_generations(Set<JSONObject> randomDataSet, String testCaseFunction) {
+        Connection l_cCon = null;
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+        HashMap<Integer, ArrayList<Integer>> arr = new HashMap<>(); //key rowid, value is arraylist of modified params.
+        try {
+            l_cCon = DBCPDataSource.getConnection();
+            for (JSONObject jobject : randomDataSet)
+            {
+                int rowid = (int) jobject.get("rowid");
+                String l_sSQL = "select params_tried from "
+                        + " ArtificialAutism.parameters_generation "
+                        + " where rowid_function_ref = '" + rowid + "' "
+                        + " and function_ref = ' " + testCaseFunction + "'";
+                l_pStatement = l_cCon.prepareStatement(l_sSQL);
+                l_rsSearch = l_pStatement.executeQuery();
+                if (l_rsSearch.next())
+                {
+                    //found existing row, reading params_tried.
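+                    //params_tried is a comma-separated list of the parameter indexes already tried for this
+                    //row, e.g. "3,17,42"; the isBlank() check below tolerates the '' written by the initial insert.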
+                    String params_tried = l_rsSearch.getString(1);
+                    String[] split = params_tried.split(",");
+                    ArrayList<Integer> integers = new ArrayList<>();
+                    for (String number : split) {
+                        if (!number.isBlank()) {
+                            integers.add(Integer.parseInt(number));
+                        }
+                    }
+                    arr.put(rowid, integers);
+                }
+                else
+                {
+                    //insert new row
+                    l_sSQL = "insert into ArtificialAutism.parameters_generation (rowid_function_ref, params_tried, function_ref) "
+                            + " values (' " + rowid + "', '', ' " + testCaseFunction + "')";
+                    l_pStatement = l_cCon.prepareStatement(l_sSQL);
+                    l_pStatement.executeUpdate();
+                    arr.put(rowid, new ArrayList<>());
+                }
+
+            }
+        } catch (SQLException e) {
+            throw new RuntimeException(e);
+        } finally {
+            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
+        }
+        return arr;
+    }
+
+    public static void deleteRow(int rowid, String testCaseFunction) {
+        Connection l_cCon = null;
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+        try {
+            l_cCon = DBCPDataSource.getConnection();
+            String l_sSQL = "delete from `" + testCaseFunction + "` where rowid = " + rowid;
+            l_pStatement = l_cCon.prepareStatement(l_sSQL);
+            l_pStatement.executeUpdate();
+        } catch (SQLException e) {
+            throw new RuntimeException(e);
+        } finally {
+            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
+        }
+    }
+
+    public static Double getbestScoreFormax_index_counter_tests_passed(int maxIndexCounterTestsPassed, String testCaseFunction, String comperator_for_score_for_failing_testcase) {
+        Connection l_cCon = null;
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+        Double bestScores = 0.0;
+        try {
+            l_cCon = DBCPDataSource.getConnection();
+            String l_sSQL = "select failed_testcase_score from `" + testCaseFunction + "` "
+                    + "where index_counter_tests_passed = " + maxIndexCounterTestsPassed + " "
+                    + "order by failed_testcase_score ";
+
+            if (comperator_for_score_for_failing_testcase.contains(">")) {
+                l_sSQL += "desc";
+            } else {
+                l_sSQL += "asc";
+            }
+            l_sSQL += " limit 1";
+            l_pStatement = l_cCon.prepareStatement(l_sSQL);
+            l_rsSearch = l_pStatement.executeQuery();
+            l_rsSearch.next();
+            bestScores = l_rsSearch.getDouble(1);
+        } catch (SQLException e) {
+            throw new RuntimeException(e);
+        } finally {
+            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
+        }
+        return bestScores;
+    }
+
+    public static void UpdateModifiedKeyForRowID(Set<JSONObject> randomDataSet, String testCaseFunction) {
+        Connection l_cCon = null;
+        PreparedStatement l_pStatement = null;
+        ResultSet l_rsSearch = null;
+
+        HashMap<Integer, ArrayList<Integer>> arr = get_parameter_generations(randomDataSet, testCaseFunction);
+        try {
+            l_cCon = DBCPDataSource.getConnection();
+            for (JSONObject jobject : randomDataSet) {
+                int rowid = (int) jobject.get("rowid");
+                ArrayList<Integer> integers = arr.get(rowid);
+
+                String l_sSQL = " UPDATE ArtificialAutism.parameters_generation "
+                        + " set params_tried = '";
+                for (int triedParamter : integers){
+                    l_sSQL += triedParamter + ",";
+                }
+                l_sSQL = l_sSQL.substring(0, l_sSQL.length() - 1); //remove trailing comma.
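+                //note: this assumes at least one tried parameter is present; with an empty list the
+                //substring above would strip the opening quote of the SQL string instead of a comma.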
+
+                l_sSQL += "' where rowid_function_ref = " + rowid
+                        + " and function_ref = ' " + testCaseFunction + "'";
+                l_pStatement = l_cCon.prepareStatement(l_sSQL);
+                System.out.println("l_sSQL: " + l_sSQL);
+                l_pStatement.executeUpdate();
+            }
+        } catch (SQLException e) {
+            throw new RuntimeException(e);
+        } finally {
+            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
+        }
+    }
+}
diff --git a/src/main/java/DataLayer/RunnerClient.java b/src/main/java/DataLayer/RunnerClient.java
new file mode 100644
index 0000000..dc8a434
--- /dev/null
+++ b/src/main/java/DataLayer/RunnerClient.java
@@ -0,0 +1,25 @@
+package DataLayer;
+
+import FunctionLayer.Datahandler;
+import edu.stanford.nlp.pipeline.StanfordCoreNLP;
+import net.dv8tion.jda.api.events.message.MessageReceivedEvent;
+
+public class RunnerClient {
+
+    public RunnerClient(String contentF, boolean mentionedBot, String channelName, Datahandler datahandler,
+                        StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment,
+                        MessageReceivedEvent event, String username) {
+        if (mentionedBot || channelName.contains("general-autism")) {
+            String ResponseStr = datahandler.getResponseMsg(contentF, username,
+                    stanfordCoreNLP, stanfordCoreNLPSentiment,
+                    false);
+            if (!ResponseStr.isEmpty()) {
+                System.out.print("\nResponseStr3: " + ResponseStr + "\n");
+                event.getMessage().getChannel().sendMessage(ResponseStr).queue();
+            }
+        } else {
+            String strF = datahandler.trimString(contentF);
+            datahandler.getResponseFutures(strF, stanfordCoreNLP, stanfordCoreNLPSentiment);
+        }
+    }
+}
diff --git a/src/main/java/DataLayer/ThreadClient.java b/src/main/java/DataLayer/ThreadClient.java
new file mode 100644
index 0000000..7233d30
--- /dev/null
+++ b/src/main/java/DataLayer/ThreadClient.java
@@ -0,0 +1,109 @@
+package DataLayer;
+
+import FunctionLayer.Datahandler;
+import edu.stanford.nlp.pipeline.StanfordCoreNLP;
+
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.net.*;
+import java.util.ArrayList;
+import java.util.Properties;
+
+public class ThreadClient {
+    public ThreadClient(Datahandler datahandler, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) {
+        ArrayList<Integer> ports = new ArrayList<>();
+        ports.add(48475);
+        ports.add(48476);
+        ports.add(48477);
+        ports.add(48478);
+
+        Properties prop = new Properties();
+        String fileName = "app.config";
+        try (FileInputStream fis = new FileInputStream(fileName)) {
+            prop.load(fis);
+        } catch (FileNotFoundException ex) {
+        } catch (IOException ex) {
+        }
+
+        String hostIP = prop.getProperty("app.hostip");
+        String hostIP2 = prop.getProperty("app.hostip2");
+        try {
+            InetAddress ipAddress = InetAddress.getByName(hostIP);//used ip's
+            InetAddress ipAddress2 = InetAddress.getByName(hostIP2);//used ip's
+            try (DatagramSocket serverSocket = new DatagramSocket(ports.get(0))) {
+                try (DatagramSocket serverSocket1 = new DatagramSocket(ports.get(1))) {
+                    try (DatagramSocket serverSocket2 = new DatagramSocket(ports.get(2))) {
+                        try (DatagramSocket serverSocket3 = new DatagramSocket(ports.get(3))) {
+                            while (true) {
+                                try {
+                                    receiveAndSendPacket(serverSocket, ipAddress, ports.get(0), datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
+                                    receiveAndSendPacket(serverSocket1,
ipAddress, ports.get(1), datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment); + receiveAndSendPacket(serverSocket2, ipAddress2, ports.get(2), datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment); + receiveAndSendPacket(serverSocket3, ipAddress2, ports.get(3), datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + } + } + } catch (SocketException e) { + e.printStackTrace(); + } + } catch (UnknownHostException e) { + e.printStackTrace(); + } + } + + private static void receiveAndSendPacket(DatagramSocket serverSocket, InetAddress ipAddress, int port, + Datahandler datahandler, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) throws + IOException { + byte[] receiveData = new byte[4096]; + DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length); + try { + /* + Only one DatagramSocket can call receive at a time since its a blocking call. yet somehow + the other DatagramSockets still get their UDP packets from receive() even if the call is made + many minutes after the actual UDP packet was sent. Maybe Security manager context? + */ + serverSocket.receive(receivePacket); + } catch (IOException e) { + e.printStackTrace(); + } + String sentence = new String(receivePacket.getData(), 0, + receivePacket.getLength()); + sentence = sentence.replace("clientmessage:", ""); + String ResponseMsg = datahandler.getResponseMsg(sentence, "", stanfordCoreNLP, stanfordCoreNLPSentiment, + true); + System.out.println("port: " + port + ". ResponseMsg ingame: " + ResponseMsg); + byte[] sendData = new byte[0]; + try { + sendData = ResponseMsg.getBytes("UTF-8"); + } catch (UnsupportedEncodingException e) { + e.printStackTrace(); + } + int deliver_port = 0; + switch (port) { + case 48475: + deliver_port = 48470; + break; + case 48476: + deliver_port = 48471; + break; + case 48477: + deliver_port = 48472; + break; + case 48478: + deliver_port = 48473; + break; + } + DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, ipAddress, deliver_port); + try { + serverSocket.send(sendPacket); + } catch (IOException e) { + e.printStackTrace(); + } + } +} diff --git a/src/main/java/DataLayer/settings.java b/src/main/java/DataLayer/settings.java new file mode 100755 index 0000000..ab25198 --- /dev/null +++ b/src/main/java/DataLayer/settings.java @@ -0,0 +1,21 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. 
+ */
+package DataLayer;
+
+/**
+ * @author install1
+ */
+public class settings {
+
+    static String password = "";
+    static String url = "";
+    static String username = "";
+    static String discordToken = "";
+    String hostIP = "";
+    String hostIP2 = "";
+    int hostport = 0;
+    int hostport2 = 0;
+}
diff --git a/src/main/java/DataLayer/testClasses.java b/src/main/java/DataLayer/testClasses.java
new file mode 100644
index 0000000..cd6420d
--- /dev/null
+++ b/src/main/java/DataLayer/testClasses.java
@@ -0,0 +1,51 @@
+package DataLayer;
+
+public class testClasses {
+    private String Sentence1;
+    private String Sentence2;
+
+    private Double resultScore;
+
+    public Double getResultScore() {
+        return resultScore;
+    }
+
+    public void setResultScore(Double resultScore) {
+        this.resultScore = resultScore;
+    }
+
+    public boolean isPerformTestingFittingLess() {
+        return PerformTestingFittingLess;
+    }
+
+    private boolean PerformTestingFittingLess;
+
+    public testClasses(String sentence1, String sentence2, int score, String comparator, boolean PerformTestingFittingLess) {
+        this.Sentence1 = sentence1;
+        this.Sentence2 = sentence2;
+        this.Score = score;
+        this.comparator = comparator;
+        this.PerformTestingFittingLess = PerformTestingFittingLess;
+    }
+
+    public String getSentence1() {
+        return Sentence1;
+    }
+
+
+    public String getSentence2() {
+        return Sentence2;
+    }
+
+
+    public int getScore() {
+        return Score;
+    }
+
+    public String getComparator() {
+        return comparator;
+    }
+
+    private int Score;
+    private String comparator;
+}
diff --git a/src/main/java/FunctionLayer/Datahandler.java b/src/main/java/FunctionLayer/Datahandler.java
new file mode 100644
index 0000000..c912d4c
--- /dev/null
+++ b/src/main/java/FunctionLayer/Datahandler.java
@@ -0,0 +1,926 @@
+package FunctionLayer;
+
+import DataLayer.DataMapper;
+import FunctionLayer.StanfordParser.SentimentAnalyzerTestDynamicTesting;
+import edu.mit.jmwe.data.IMWE;
+import edu.mit.jmwe.data.IToken;
+import edu.stanford.nlp.ie.AbstractSequenceClassifier;
+import edu.stanford.nlp.ie.crf.CRFClassifier;
+import edu.stanford.nlp.ling.CoreAnnotations;
+import edu.stanford.nlp.ling.CoreLabel;
+import edu.stanford.nlp.ling.TaggedWord;
+import edu.stanford.nlp.parser.lexparser.LexicalizedParser;
+import edu.stanford.nlp.pipeline.Annotation;
+import edu.stanford.nlp.pipeline.CoreDocument;
+import edu.stanford.nlp.pipeline.CoreEntityMention;
+import edu.stanford.nlp.pipeline.StanfordCoreNLP;
+import edu.stanford.nlp.tagger.maxent.MaxentTagger;
+import edu.stanford.nlp.trees.*;
+import edu.stanford.nlp.util.CoreMap;
+import org.ejml.simple.SimpleMatrix;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.*;
+import java.util.concurrent.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+
+public class Datahandler {
+
+    private JSONObject data;
+
+    private JSONParser parser = new JSONParser();
+
+    //wanted to put this in config too but welp, can't be arsed to set this up differently.
+ private ExecutorService pool = Executors.newFixedThreadPool(4); + private CompletionService completionService = new ExecutorCompletionService(pool); + private HashMap pipelineAnnotationCache; + private HashMap pipelineSentimentAnnotationCache; + private HashMap coreDocumentAnnotationCache; + private HashMap jmweAnnotationCache; + + private MaxentTagger tagger = new MaxentTagger(); + + private GrammaticalStructureFactory gsf; + private AbstractSequenceClassifier classifier; + + //SentimentAnalyzer Hashmaps + private HashMap tokenizeCountingHashMap = new HashMap(); + + private HashMap>> taggedWordListHashMap = new HashMap(); + + private HashMap> retrieveTGWListHashMap = new HashMap(); + + private HashMap> sentences1HashMap = new HashMap(); + + private HashMap> sentencesSentimentHashMap = new HashMap(); + + private HashMap> trees1HashMap = new HashMap(); + + private HashMap> grammaticalStructureHashMap = new HashMap(); + + private HashMap> typedDependenciesHashMap = new HashMap(); + + private HashMap> rnnCoreAnnotationsPredictedHashMap = new HashMap(); + + private HashMap> simpleMatricesHashMap = new HashMap(); + + private HashMap> simpleMatricesNodevectorsHashMap = new HashMap(); + + private HashMap listHashMap = new HashMap(); + + private HashMap longestHashMap = new HashMap(); + + private HashMap sentimentHashMap = new HashMap(); + + private HashMap>> imwesHashMap = new HashMap(); + + private HashMap InflectedCounterNegativeHashMap = new HashMap(); + + private HashMap InflectedCounterPositiveHashMap = new HashMap(); + + private HashMap> tokenEntryHashMap = new HashMap(); + + private HashMap MarkedContinuousCounterHashMap = new HashMap(); + + private HashMap UnmarkedPatternCounterHashMap = new HashMap(); + + private HashMap> strTokensIpartFormHashMap = new HashMap(); + + private HashMap> tokenFormsHashMap = new HashMap(); + + private HashMap> strTokenEntryGetPOSHashMap = new HashMap(); + + private HashMap> intTokenEntyCountsHashMap = new HashMap(); + + private HashMap> ITokenTagsHashMap = new HashMap(); + + private HashMap> strTokenStemsHashMap = new HashMap(); + + private HashMap AnotatorcounterHashMap = new HashMap(); + + private HashMap TokensCounterHashMap = new HashMap(); + + private HashMap> entityTokenTagsHashMap = new HashMap(); + + private HashMap> nerEntitiesHashMap = new HashMap(); + + private HashMap> nerEntitiesTypeHashMap = new HashMap(); + + private HashMap> stopWordTokenHashMap = new HashMap(); + + private HashMap> stopWordLemmaHashMap = new HashMap(); + + private HashMap PairCounterHashMap = new HashMap(); + + private HashMap> strResponses = new HashMap<>(); + private void LoadDataFromJson() { + try { + data = (JSONObject) parser.parse( + new FileReader("/mnt/hdd/home/christian/content/sourcemod_plugins_and_extensions/addons/" + + "sourcemod/scripting/gogs/ArtificialAutism/dynamicScore.json"));//path to the JSON file. + } catch (IOException e) { + try { + String whoami = System.getProperty("user.name"); + data = (JSONObject) parser.parse( + new FileReader("/home/" + whoami + "/autism_bot_number_crunching/dynamicScore.json"));//path to the JSON file. 
+ } catch (IOException ex) { + throw new RuntimeException(ex); + } catch (ParseException ex) { + throw new RuntimeException(ex); + } + } catch (ParseException e) { + throw new RuntimeException(e); + } + } + + public Datahandler() { + LoadDataFromJson(); + jmweAnnotationCache = new HashMap(); + pipelineAnnotationCache = new HashMap(); + pipelineSentimentAnnotationCache = new HashMap(); + coreDocumentAnnotationCache = new HashMap(); + gsf = initiateGrammaticalStructureFactory(); + String nerModel = "edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz"; + classifier = CRFClassifier.getClassifierNoExceptions(nerModel); + } + + private GrammaticalStructureFactory initiateGrammaticalStructureFactory() { + // lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz" + String lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz"; + LexicalizedParser lp = LexicalizedParser.loadModel(lexParserEnglishPCFG, "-maxLength", "100"); + TreebankLanguagePack tlp = lp.getOp().langpack(); + return tlp.grammaticalStructureFactory(); + } + + public StanfordCoreNLP pipeLineSetUp() { + Properties props = new Properties(); + String shiftReduceParserPath = "edu/stanford/nlp/models/srparser/englishSR.ser.gz"; + // nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.caseless.distsim.crf.ser.gz" + //String nerModel2 = "edu/stanford/nlp/models/ner/english.conll.4class.distsim.crf.ser.gz"; + // nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.caseless.distsim.crf.ser.gz" + //String nerModel3 = "edu/stanford/nlp/models/ner/english.muc.7class.distsim.crf.ser.gz"; + props.setProperty("annotators", "tokenize,ssplit,pos,lemma,ner,parse"); + props.setProperty("parse.model", shiftReduceParserPath); + props.setProperty("parse.maxlen", "90"); + props.setProperty("parse.binaryTrees", "true"); + props.setProperty("threads", "1"); + props.setProperty("pos.maxlen", "90"); + props.setProperty("tokenize.maxlen", "90"); + props.setProperty("ssplit.maxlen", "90"); + props.setProperty("lemma.maxlen", "90"); + props.setProperty("ner.model", "edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz" + + ",edu/stanford/nlp/models/ner/english.conll.4class.distsim.crf.ser.gz" + + ",edu/stanford/nlp/models/ner/english.muc.7class.distsim.crf.ser.gz"); + props.setProperty("ner.combinationMode", "HIGH_RECALL"); + props.setProperty("regexner.ignorecase", "true"); + props.setProperty("ner.fine.regexner.ignorecase", "true"); + props.setProperty("tokenize.options", "untokenizable=firstKeep"); + return new StanfordCoreNLP(props); + } + + public StanfordCoreNLP shiftReduceParserInitiate() { + Properties propsSentiment = new Properties(); + // lexParserEnglishRNN = "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz" + String lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz"; + String sentimentModel = "edu/stanford/nlp/models/sentiment/sentiment.ser.gz"; + // taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger" + String taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words-distsim.tagger"; + String customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for,if,in,into,is,it,no,not,of," + + "on,or,such,that,the,their,then,there,these,they,this,to,was,will,with"; + propsSentiment.setProperty("parse.model", lexParserEnglishPCFG); + propsSentiment.setProperty("sentiment.model", sentimentModel); + propsSentiment.setProperty("parse.maxlen", "90"); + 
propsSentiment.setProperty("threads", "1"); + propsSentiment.setProperty("pos.maxlen", "90"); + propsSentiment.setProperty("tokenize.maxlen", "90"); + propsSentiment.setProperty("ssplit.maxlen", "90"); + propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,sentiment,lemma,stopword"); //coref too expensive memorywise + propsSentiment.setProperty("customAnnotatorClass.stopword", "FunctionLayer.StopwordAnnotator"); + propsSentiment.setProperty(StopwordAnnotator.STOPWORDS_LIST, customStopWordList); + propsSentiment.setProperty("tokenize.options", "untokenizable=firstKeep"); + tagger = new MaxentTagger(taggerPath); + return new StanfordCoreNLP(propsSentiment); + } + + public String trimString(String str) { + String message = str.trim(); + if (message.startsWith("<@")) { + message = message.substring(message.indexOf("> ") + 2); + } + if (!message.isEmpty()) { + message = message.replace("@", ""); + if (message.contains("<>")) { + message = message.substring(message.indexOf(">")); + } + if (message.startsWith("[ *")) { + message = message.substring(message.indexOf("]")); + } + } + return message; + } + + private void createStrAnnotation(String str, StanfordCoreNLP stanfordCoreNLP, Boolean sentimentBool) { + Annotation strAnno2 = new Annotation(str); + strAnno2.compact(); + try { + stanfordCoreNLP.annotate(strAnno2); + if (sentimentBool) { + pipelineSentimentAnnotationCache.put(str, strAnno2); + } else { + pipelineAnnotationCache.put(str, strAnno2); + } + } catch (Exception e) { + System.out.println("stanfordcorenlp annotate failed" + e.getMessage()); + } + } + + private SentimentAnalyzerTestDynamicTesting getReponseFuturesHelper(String strF, String str1, StanfordCoreNLP stanfordCoreNLP, + StanfordCoreNLP stanfordCoreNLPSentiment, + List coreMaps1, Annotation strAnno, + Annotation strAnnoSentiment, CoreDocument coreDocument + , Integer tokenizeCountingF, List> taggedWordListF, ArrayList typedDependenciesF + , ArrayList rnnCoreAnnotationsPredictedF, ArrayList simpleMatricesF + , ArrayList simpleMatricesNodevectorsF, List listF, Integer longestF, List sentencesF + , List sentencesSentimentF, ArrayList treesF, ArrayList grammaticalStructuresF + , Integer sentimentLongestF, List> imwesF, Integer inflectedCounterNegativeF, Integer inflectedCounterPositiveF + , ArrayList tokenEntryF, Integer unmarkedPatternCounterF, ArrayList strTokensIpartFormF, ArrayList tokenFormsF + , ArrayList intTokenEntyCountsF, Integer markedContinuousCounterF, ArrayList ITokenTagsF + , ArrayList strTokenEntryGetPOSF, ArrayList retrieveTGWListF, Integer pairCounterF + , Integer tokensCounterF, ArrayList stopWordLemmaF, ArrayList nerEntitiesF + , ArrayList stopWordTokenF, ArrayList entityTokenTagsF, ArrayList nerEntitiesTypeF + , Integer anotatorcounterF, ArrayList strTokenStemsF) { + Annotation annotation2 = pipelineSentimentAnnotationCache.getOrDefault(str1, null); + Annotation annotation4 = pipelineAnnotationCache.getOrDefault(str1, null); + CoreDocument coreDocument1 = coreDocumentAnnotationCache.getOrDefault(str1, null); + Annotation jmweAnnotation = jmweAnnotationCache.getOrDefault(str1, null); + if (annotation2 == null) { + createStrAnnotation(str1, stanfordCoreNLPSentiment, true); + } + if (annotation4 == null) { + createStrAnnotation(str1, stanfordCoreNLP, false); + } + if (coreDocument1 == null) { + getCoreDocumentsSuggested(stanfordCoreNLP, str1); + } + if (jmweAnnotation == null) { + getJMWEAnnotation(str1); + jmweAnnotation = jmweAnnotationCache.get(str1); + } + Integer tokenizeCounting = 
tokenizeCountingHashMap.getOrDefault(str1, null); + + List> taggedWordList1 = taggedWordListHashMap.getOrDefault(str1, null); + + java.util.ArrayList retrieveTGWList1 = retrieveTGWListHashMap.getOrDefault(str1, null); + + List sentence1 = sentences1HashMap.getOrDefault(str1, null); + + List sentenceSentiment1 = sentencesSentimentHashMap.getOrDefault(str1, null); + ArrayList trees1 = trees1HashMap.getOrDefault(str1, null); + List coreMaps2 = new ArrayList<>(); + ArrayList grammaticalStructures1 = grammaticalStructureHashMap.getOrDefault(str1, null); + if (jmweAnnotation != null) { + coreMaps2 = jmweAnnotation.get(CoreAnnotations.SentencesAnnotation.class); + } + ArrayList typedDependencies1 = typedDependenciesHashMap.getOrDefault(str1, null); + ArrayList rnnCoreAnnotationsPredicted1 = rnnCoreAnnotationsPredictedHashMap.getOrDefault(str1, null); + ArrayList simpleMatrices1 = simpleMatricesHashMap.getOrDefault(str1, null); + simpleMatricesHashMap.getOrDefault(str1, null); + ArrayList simpleMatricesNodevectors1 = simpleMatricesNodevectorsHashMap.getOrDefault(str1, null); + List list1 = listHashMap.getOrDefault(str1, null); + Integer longest1 = longestHashMap.getOrDefault(str1, null); + Integer sentimentLongest1 = sentimentHashMap.getOrDefault(str1, null); + List> imwes1 = imwesHashMap.getOrDefault(str1, null); + Integer InflectedCounterNegative1 = InflectedCounterNegativeHashMap.getOrDefault(str1, null); + Integer InflectedCounterPositive1 = InflectedCounterPositiveHashMap.getOrDefault(str1, null); + ArrayList tokenEntry1 = tokenEntryHashMap.getOrDefault(str1, null); + Integer MarkedContinuousCounter1 = MarkedContinuousCounterHashMap.getOrDefault(str1, null); + Integer UnmarkedPatternCounter1 = UnmarkedPatternCounterHashMap.getOrDefault(str1, null); + ArrayList strTokensIpartForm1 = strTokensIpartFormHashMap.getOrDefault(str1, null); + ArrayList tokenForms1 = tokenFormsHashMap.getOrDefault(str1, null); + ArrayList strTokenEntryGetPOS1 = strTokenEntryGetPOSHashMap.getOrDefault(str1, null); + ArrayList intTokenEntyCounts1 = intTokenEntyCountsHashMap.getOrDefault(str1, null); + ArrayList ITokenTags1 = ITokenTagsHashMap.getOrDefault(str1, null); + ArrayList strTokenStems1 = strTokenStemsHashMap.getOrDefault(str1, null); + Integer Anotatorcounter1 = AnotatorcounterHashMap.getOrDefault(str1, null); + Integer TokensCounter1 = TokensCounterHashMap.getOrDefault(str1, null); + ArrayList entityTokenTags1 = entityTokenTagsHashMap.getOrDefault(str1, null); + ArrayList nerEntities1 = nerEntitiesHashMap.getOrDefault(str1, null); + ArrayList nerEntitiesType1 = nerEntitiesTypeHashMap.getOrDefault(str1, null); + ArrayList stopWordToken1 = stopWordTokenHashMap.getOrDefault(str1, null); + ArrayList stopWordLemma1 = stopWordLemmaHashMap.getOrDefault(str1, null); + Integer PairCounter1 = PairCounterHashMap.getOrDefault(str1, null); + + Annotation annotationStrPipeLine1 = pipelineAnnotationCache.get(str1); + Annotation annotationStrPipeLineSentiment1 = pipelineSentimentAnnotationCache.get(str1); + + SentimentAnalyzerTestDynamicTesting SMX = new SentimentAnalyzerTestDynamicTesting(strF, str1, + coreMaps1, coreMaps2, strAnno, + //sometimes Annotation(str) returns null so in that case better use result of sentiment + annotationStrPipeLine1 == null ? 
annotationStrPipeLineSentiment1 : annotationStrPipeLine1, + strAnnoSentiment, + annotationStrPipeLineSentiment1, coreDocument, coreDocumentAnnotationCache.get(str1), + tagger, gsf, classifier, tokenizeCounting, tokenizeCountingF, + taggedWordListF, taggedWordList1, retrieveTGWListF, retrieveTGWList1, + sentencesF, sentence1, sentencesSentimentF, sentenceSentiment1, treesF, trees1, + grammaticalStructuresF, grammaticalStructures1, typedDependenciesF, + typedDependencies1, rnnCoreAnnotationsPredictedF, rnnCoreAnnotationsPredicted1, + simpleMatricesF, simpleMatrices1, simpleMatricesNodevectorsF, simpleMatricesNodevectors1, + listF, list1, longestF, longest1, sentimentLongestF, sentimentLongest1, imwesF, + imwes1, inflectedCounterNegativeF, InflectedCounterNegative1, inflectedCounterPositiveF, + InflectedCounterPositive1, tokenEntryF, tokenEntry1, markedContinuousCounterF, + MarkedContinuousCounter1, unmarkedPatternCounterF, UnmarkedPatternCounter1, + strTokensIpartFormF, strTokensIpartForm1, tokenFormsF, tokenForms1, + strTokenEntryGetPOSF, strTokenEntryGetPOS1, intTokenEntyCountsF, + intTokenEntyCounts1, ITokenTagsF, ITokenTags1, strTokenStemsF, strTokenStems1, + anotatorcounterF, Anotatorcounter1, tokensCounterF, TokensCounter1, + entityTokenTagsF, entityTokenTags1, nerEntitiesF, nerEntities1, nerEntitiesTypeF, + nerEntitiesType1, stopWordTokenF, stopWordToken1, stopWordLemmaF, stopWordLemma1, + pairCounterF, PairCounter1, data, false + ); + if (tokenizeCounting == null) { + tokenizeCountingHashMap.put(str1, SMX.getTokenizeCounting()); + } + if (taggedWordList1 == null) { + taggedWordListHashMap.put(str1, SMX.getTaggedWordList1()); + } + if (retrieveTGWList1 == null) { + retrieveTGWListHashMap.put(str1, SMX.getRetrieveTGWList1()); + } + if (sentence1 == null) { + sentences1HashMap.put(str1, SMX.getSentences1()); + } + if (sentenceSentiment1 == null) { + sentencesSentimentHashMap.put(str1, SMX.getSentencesSentiment1()); + } + if (trees1 == null) { + trees1HashMap.put(str1, SMX.getTrees1()); + } + if (grammaticalStructures1 == null) { + grammaticalStructureHashMap.put(str1, SMX.getGrammaticalStructures1()); + } + if (typedDependencies1 == null) { + typedDependenciesHashMap.put(str1, SMX.getTypedDependencies1()); + } + if (rnnCoreAnnotationsPredicted1 == null) { + rnnCoreAnnotationsPredictedHashMap.put(str1, SMX.getRnnCoreAnnotationsPredicted1()); + } + if (simpleMatrices1 == null) { + simpleMatricesHashMap.put(str1, SMX.getSimpleMatrices1()); + } + if (simpleMatricesNodevectors1 == null) { + simpleMatricesNodevectorsHashMap.put(str1, SMX.getSimpleMatricesNodevectors1()); + } + if (list1 == null) { + listHashMap.put(str1, SMX.getList1()); + } + if (longest1 == null) { + longestHashMap.put(str1, SMX.getLongest1()); + } + if (sentimentLongest1 == null) { + sentimentHashMap.put(str1, SMX.getSentimentLongest1()); + } + if (imwes1 == null) { + imwesHashMap.put(str1, SMX.getImwes1()); + } + if (InflectedCounterNegative1 == null) { + InflectedCounterNegativeHashMap.put(str1, SMX.getInflectedCounterNegative1()); + } + if (InflectedCounterPositive1 == null) { + InflectedCounterPositiveHashMap.put(str1, SMX.getInflectedCounterPositive1()); + } + if (tokenEntry1 == null) { + tokenEntryHashMap.put(str1, SMX.getTokenEntry1()); + } + if (MarkedContinuousCounter1 == null) { + MarkedContinuousCounterHashMap.put(str1, SMX.getMarkedContinuousCounter1()); + } + if (UnmarkedPatternCounter1 == null) { + UnmarkedPatternCounterHashMap.put(str1, SMX.getUnmarkedPatternCounter1()); + } + if (strTokensIpartForm1 == 
null) { + strTokensIpartFormHashMap.put(str1, SMX.getStrTokensIpartForm1()); + } + if (tokenForms1 == null) { + tokenFormsHashMap.put(str1, SMX.getTokenForms1()); + } + if (strTokenEntryGetPOS1 == null) { + strTokenEntryGetPOSHashMap.put(str1, SMX.getStrTokenEntryGetPOS1()); + } + if (intTokenEntyCounts1 == null) { + intTokenEntyCountsHashMap.put(str1, SMX.getIntTokenEntyCounts1()); + } + if (ITokenTags1 == null) { + ITokenTagsHashMap.put(str1, SMX.getITokenTags1()); + } + if (strTokenStems1 == null) { + strTokenStemsHashMap.put(str1, SMX.getStrTokenStems1()); + } + if (Anotatorcounter1 == null) { + AnotatorcounterHashMap.put(str1, SMX.getAnotatorcounter1()); + } + if (TokensCounter1 == null) { + TokensCounterHashMap.put(str1, SMX.getTokensCounter1()); + } + if (entityTokenTags1 == null) { + entityTokenTagsHashMap.put(str1, SMX.getEntityTokenTags1()); + } + if (nerEntities1 == null) { + nerEntitiesHashMap.put(str1, SMX.getNerEntities1()); + } + if (nerEntitiesType1 == null) { + nerEntitiesTypeHashMap.put(str1, SMX.getNerEntitiesType1()); + } + if (stopWordToken1 == null) { + stopWordTokenHashMap.put(str1, SMX.getStopWordToken1()); + } + if (stopWordLemma1 == null) { + stopWordLemmaHashMap.put(str1, SMX.getStopWordLemma1()); + } + if (PairCounter1 == null) { + PairCounterHashMap.put(str1, SMX.getPairCounter1()); + } + return SMX; + } + + private class get_res implements Callable { + private final String strF; + private final String str1; + private final StanfordCoreNLP stanfordCoreNLP; + private final StanfordCoreNLP stanfordCoreNLPSentiment; + private final List coreMaps1; + private final Annotation strAnno; + private final Annotation strAnnoSentiment; + private final CoreDocument coreDocument; + private final Integer tokenizeCountingF; + private final List> taggedWordListF; + private final ArrayList typedDependenciesF; + private final ArrayList rnnCoreAnnotationsPredictedF; + private final ArrayList simpleMatricesF; + private final ArrayList simpleMatricesNodevectorsF; + private final List listF; + private final Integer longestF; + private final List sentencesF; + private final List sentencesSentimentF; + private final ArrayList treesF; + private final ArrayList grammaticalStructuresF; + private final Integer sentimentLongestF; + private final List> imwesF; + private final Integer inflectedCounterNegativeF; + private final Integer inflectedCounterPositiveF; + private final ArrayList tokenEntryF; + private final Integer unmarkedPatternCounterF; + private final ArrayList strTokensIpartFormF; + private final ArrayList tokenFormsF; + private final ArrayList intTokenEntyCountsF; + private final Integer markedContinuousCounterF; + private final ArrayList iTokenTagsF; + private final ArrayList strTokenEntryGetPOSF; + private final ArrayList retrieveTGWListF; + private final Integer pairCounterF; + private final Integer tokensCounterF; + private final ArrayList stopWordLemmaF; + private final ArrayList nerEntitiesF; + private final ArrayList stopWordTokenF; + private final ArrayList entityTokenTagsF; + private final ArrayList nerEntitiesTypeF; + private final Integer anotatorcounterF; + private final ArrayList strTokenStemsF; + + public get_res(String strF, String str1, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment, List coreMaps1, Annotation strAnno, Annotation strAnnoSentiment, CoreDocument coreDocument, Integer tokenizeCountingF, List> taggedWordListF, ArrayList typedDependenciesF, ArrayList rnnCoreAnnotationsPredictedF, ArrayList simpleMatricesF, ArrayList 
simpleMatricesNodevectorsF, List listF, Integer longestF, List sentencesF, List sentencesSentimentF, ArrayList treesF, ArrayList grammaticalStructuresF, Integer sentimentLongestF, List> imwesF, Integer inflectedCounterNegativeF, Integer inflectedCounterPositiveF, ArrayList tokenEntryF, Integer unmarkedPatternCounterF, ArrayList strTokensIpartFormF, ArrayList tokenFormsF, ArrayList intTokenEntyCountsF, Integer markedContinuousCounterF, ArrayList iTokenTagsF, ArrayList strTokenEntryGetPOSF, ArrayList retrieveTGWListF, Integer pairCounterF, Integer tokensCounterF, ArrayList stopWordLemmaF, ArrayList nerEntitiesF, ArrayList stopWordTokenF, ArrayList entityTokenTagsF, ArrayList nerEntitiesTypeF, Integer anotatorcounterF, ArrayList strTokenStemsF) { + + this.strF = strF; + this.str1 = str1; + this.stanfordCoreNLP = stanfordCoreNLP; + this.stanfordCoreNLPSentiment = stanfordCoreNLPSentiment; + this.coreMaps1 = coreMaps1; + this.strAnno = strAnno; + this.strAnnoSentiment = strAnnoSentiment; + this.coreDocument = coreDocument; + this.tokenizeCountingF = tokenizeCountingF; + this.taggedWordListF = taggedWordListF; + this.typedDependenciesF = typedDependenciesF; + this.rnnCoreAnnotationsPredictedF = rnnCoreAnnotationsPredictedF; + this.simpleMatricesF = simpleMatricesF; + this.simpleMatricesNodevectorsF = simpleMatricesNodevectorsF; + this.listF = listF; + this.longestF = longestF; + this.sentencesF = sentencesF; + this.sentencesSentimentF = sentencesSentimentF; + this.treesF = treesF; + this.grammaticalStructuresF = grammaticalStructuresF; + this.sentimentLongestF = sentimentLongestF; + this.imwesF = imwesF; + this.inflectedCounterNegativeF = inflectedCounterNegativeF; + this.inflectedCounterPositiveF = inflectedCounterPositiveF; + this.tokenEntryF = tokenEntryF; + this.unmarkedPatternCounterF = unmarkedPatternCounterF; + this.strTokensIpartFormF = strTokensIpartFormF; + this.tokenFormsF = tokenFormsF; + this.intTokenEntyCountsF = intTokenEntyCountsF; + this.markedContinuousCounterF = markedContinuousCounterF; + this.iTokenTagsF = iTokenTagsF; + this.strTokenEntryGetPOSF = strTokenEntryGetPOSF; + this.retrieveTGWListF = retrieveTGWListF; + this.pairCounterF = pairCounterF; + this.tokensCounterF = tokensCounterF; + this.stopWordLemmaF = stopWordLemmaF; + this.nerEntitiesF = nerEntitiesF; + this.stopWordTokenF = stopWordTokenF; + this.entityTokenTagsF = entityTokenTagsF; + this.nerEntitiesTypeF = nerEntitiesTypeF; + this.anotatorcounterF = anotatorcounterF; + this.strTokenStemsF = strTokenStemsF; + } + + @Override + public SentimentAnalyzerTestDynamicTesting call() throws Exception { + return getReponseFuturesHelper(strF, str1, stanfordCoreNLP, stanfordCoreNLPSentiment, + coreMaps1, strAnno, strAnnoSentiment, coreDocument, tokenizeCountingF, taggedWordListF + , typedDependenciesF, rnnCoreAnnotationsPredictedF, simpleMatricesF, simpleMatricesNodevectorsF + , listF, longestF, sentencesF, sentencesSentimentF, treesF, grammaticalStructuresF, sentimentLongestF + , imwesF, inflectedCounterNegativeF, inflectedCounterPositiveF, tokenEntryF, unmarkedPatternCounterF + , strTokensIpartFormF, tokenFormsF, intTokenEntyCountsF, markedContinuousCounterF, iTokenTagsF + , strTokenEntryGetPOSF, retrieveTGWListF, pairCounterF, tokensCounterF, stopWordLemmaF, nerEntitiesF + , stopWordTokenF, entityTokenTagsF, nerEntitiesTypeF, anotatorcounterF, strTokenStemsF); + } + } + + public String getResponseFutures(String strF, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) { + if 
(strResponses.getOrDefault(strF, null) == null) {
+ strResponses.put(strF, new ArrayList<>());
+ }
+
+ Annotation strAnno = new Annotation(strF);
+ strAnno.compact();
+ stanfordCoreNLP.annotate(strAnno);
+
+ Annotation strAnnoSentiment = new Annotation(strF);
+ strAnnoSentiment.compact();
+ stanfordCoreNLPSentiment.annotate(strAnnoSentiment);
+
+ Annotation annotation = new Annotation(strF);
+ stanfordCoreNLP.annotate(annotation);
+ CoreDocument coreDocument = new CoreDocument(annotation);
+ Annotation jmweAnnotationF = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strF);
+ List coreMaps1 = jmweAnnotationF.get(CoreAnnotations.SentencesAnnotation.class);
+
+ Integer tokenizeCountingF = null;
+ List> taggedWordListF = null;
+ java.util.ArrayList retrieveTGWListF = null;
+ List sentencesF = null;
+ List sentencesSentimentF = null;
+ java.util.ArrayList treesF = null;
+ ArrayList grammaticalStructuresF = null;
+ java.util.ArrayList typedDependenciesF = null;
+ java.util.ArrayList rnnCoreAnnotationsPredictedF = null;
+ java.util.ArrayList simpleMatricesF = null;
+ java.util.ArrayList simpleMatricesNodevectorsF = null;
+ List listF = null;
+ Integer longestF = null;
+ Integer sentimentLongestF = null;
+ List> imwesF = null;
+ Integer InflectedCounterNegativeF = null;
+ Integer InflectedCounterPositiveF = null;
+ ArrayList tokenEntryF = null;
+ Integer MarkedContinuousCounterF = null;
+ Integer UnmarkedPatternCounterF = null;
+ ArrayList strTokensIpartFormF = null;
+ java.util.ArrayList tokenFormsF = null;
+ ArrayList strTokenEntryGetPOSF = null;
+ java.util.ArrayList intTokenEntyCountsF = null;
+ ArrayList ITokenTagsF = null;
+ java.util.ArrayList strTokenStemsF = null;
+ Integer AnotatorcounterF = null;
+ Integer TokensCounterF = null;
+ java.util.ArrayList entityTokenTagsF = null;
+ java.util.ArrayList nerEntitiesF = null;
+ java.util.ArrayList nerEntitiesTypeF = null;
+ java.util.ArrayList stopWordTokenF = null;
+ java.util.ArrayList stopWordLemmaF = null;
+ Integer PairCounterF = null;
+
+ ArrayList concurrentRelations = new ArrayList();
+ StringBuilder SB = new StringBuilder();
+ List ues_copy = new ArrayList(DataMapper.getAllStrings());
+ // sentinel meaning "no score recorded yet"; compared against in the drain loop below
+ double preRelationUserCounters = -1.123456789;
+
+ ArrayList> futures = new ArrayList<>();
+ Properties prop = new Properties();
+ String fileName = "app.config";
+ try (FileInputStream fis = new FileInputStream(fileName)) {
+ prop.load(fis);
+ } catch (FileNotFoundException ex) {
+ // app.config is absent: prop stays empty and the property lookups below will fail fast
+ } catch (IOException ex) {
+ // app.config is unreadable: treated the same as absent
+ }
+ for (String str1 : ues_copy) {
+ if (!strF.equals(str1)) { // value equality; the previous != only compared references
+ // submit scoring of the (strF, str1) pair; results are drained sequentially below
+ Future submit = completionService.submit(new get_res(strF, str1, stanfordCoreNLP, stanfordCoreNLPSentiment,
+ coreMaps1, strAnno, strAnnoSentiment, coreDocument, tokenizeCountingF, taggedWordListF
+ , typedDependenciesF, rnnCoreAnnotationsPredictedF, simpleMatricesF, simpleMatricesNodevectorsF
+ , listF, longestF, sentencesF, sentencesSentimentF, treesF, grammaticalStructuresF, sentimentLongestF
+ , imwesF, InflectedCounterNegativeF, InflectedCounterPositiveF, tokenEntryF, UnmarkedPatternCounterF
+ , strTokensIpartFormF, tokenFormsF, intTokenEntyCountsF, MarkedContinuousCounterF, ITokenTagsF
+ , strTokenEntryGetPOSF, retrieveTGWListF, PairCounterF, TokensCounterF, stopWordLemmaF, nerEntitiesF
+ , stopWordTokenF, entityTokenTagsF, nerEntitiesTypeF, AnotatorcounterF, strTokenStemsF));
+ futures.add(submit);
+ }
+ }
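+ /*
+ * Added note on the caching scheme: every *F local above starts as null, so each get_res
+ * task recomputes the strF-side artifacts (tags, trees, dependencies, sentiment matrices)
+ * it needs. The drain loop below copies them out of the first completed task; within this
+ * call that is only bookkeeping, since all tasks were already submitted with nulls, but the
+ * commented-out rescoring block further down shows the intended reuse, e.g. (hypothetical):
+ *
+ * SentimentAnalyzerTestDynamicTesting first = completionService.take().get();
+ * taggedWordListF = first.getTaggedWordListF(); // later helper calls skip re-tagging strF
+ */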
+
+ int pending = futures.size();
+ while (pending > 0) {
+ try {
+ Future completed = completionService.poll(100, TimeUnit.MILLISECONDS);
+ if (completed != null) {
+ --pending;
+ SentimentAnalyzerTestDynamicTesting SMX = completed.get();
+ if (SMX == null) continue;
+ double scoreRelationLastUserMsg = SMX.getScore();
+ if (scoreRelationLastUserMsg > preRelationUserCounters
+ || preRelationUserCounters == -1.123456789) { // sentinel: the first score is always accepted
+ preRelationUserCounters = scoreRelationLastUserMsg;
+ concurrentRelations.add(SMX.getSecondaryString());
+ }
+
+ // capture the strF-side caches from the first completed task that provides them;
+ // this runs on the draining thread only, so it needs no synchronization
+ if (tokenizeCountingF == null) {
+ tokenizeCountingF = SMX.getTokenizeCountingF();
+ }
+ if (taggedWordListF == null) {
+ taggedWordListF = SMX.getTaggedWordListF();
+ }
+ if (typedDependenciesF == null) {
+ typedDependenciesF = SMX.getTypedDependenciesF();
+ }
+ if (rnnCoreAnnotationsPredictedF == null) {
+ rnnCoreAnnotationsPredictedF = SMX.getRnnCoreAnnotationsPredictedF();
+ }
+ if (simpleMatricesF == null) {
+ simpleMatricesF = SMX.getSimpleMatricesF();
+ }
+ if (simpleMatricesNodevectorsF == null) {
+ simpleMatricesNodevectorsF = SMX.getSimpleMatricesNodevectorsF();
+ }
+ if (listF == null) {
+ listF = SMX.getListF();
+ }
+ if (longestF == null) {
+ longestF = SMX.getLongestF();
+ }
+ if (sentencesF == null) {
+ sentencesF = SMX.getSentencesF();
+ }
+ if (sentencesSentimentF == null) {
+ sentencesSentimentF = SMX.getSentencesSentimentF();
+ }
+ if (treesF == null) {
+ treesF = SMX.getTreesF();
+ }
+ if (grammaticalStructuresF == null) {
+ grammaticalStructuresF = SMX.getGrammaticalStructuresF();
+ }
+ if (sentimentLongestF == null) {
+ sentimentLongestF = SMX.getSentimentLongestF();
+ }
+ if (imwesF == null) {
+ imwesF = SMX.getImwesF();
+ }
+ if (InflectedCounterNegativeF == null) {
+ InflectedCounterNegativeF = SMX.getInflectedCounterNegativeF();
+ }
+ if (InflectedCounterPositiveF == null) {
+ InflectedCounterPositiveF = SMX.getInflectedCounterPositiveF();
+ }
+ if (tokenEntryF == null) {
+ tokenEntryF = SMX.getTokenEntryF();
+ }
+ if (UnmarkedPatternCounterF == null) {
+ UnmarkedPatternCounterF = SMX.getUnmarkedPatternCounterF();
+ }
+ if (strTokensIpartFormF == null) {
+ strTokensIpartFormF = SMX.getStrTokensIpartFormF();
+ }
+ if (tokenFormsF == null) {
+ tokenFormsF = SMX.getTokenFormsF();
+ }
+ if (intTokenEntyCountsF == null) {
+ intTokenEntyCountsF = SMX.getIntTokenEntyCountsF();
+ }
+ if (MarkedContinuousCounterF == null) {
+ MarkedContinuousCounterF = SMX.getMarkedContinuousCounterF();
+ }
+ if (ITokenTagsF == null) {
+ ITokenTagsF = SMX.getITokenTagsF();
+ }
+ if (strTokenEntryGetPOSF == null) {
+ strTokenEntryGetPOSF = SMX.getStrTokenEntryGetPOSF();
+ }
+ if (retrieveTGWListF == null) {
+ retrieveTGWListF = SMX.getRetrieveTGWListF();
+ }
+ if (PairCounterF == null) {
+ PairCounterF = SMX.getPairCounterF();
+ }
+ if (TokensCounterF == null) {
+ TokensCounterF = SMX.getTokensCounterF();
+ }
+ if (stopWordLemmaF == null) {
+ stopWordLemmaF = SMX.getStopWordLemmaF();
+ }
+ if (nerEntitiesF == null) {
+ nerEntitiesF = SMX.getNerEntitiesF();
+ }
+ if (stopWordTokenF == null) {
+ stopWordTokenF = SMX.getStopWordTokenF();
+ }
+ if (entityTokenTagsF == null) {
+ entityTokenTagsF = SMX.getEntityTokenTagsF();
+ }
+ if (nerEntitiesTypeF == null) {
+ nerEntitiesTypeF = SMX.getNerEntitiesTypeF();
+ }
+ if (AnotatorcounterF == null) {
+ AnotatorcounterF = SMX.getAnotatorcounterF();
+ }
+ if (strTokenStemsF == null) {
+ strTokenStemsF = SMX.getStrTokenStemsF();
+ }
+ }
+ } catch (InterruptedException | ExecutionException e) {
+ // a failed or interrupted scoring task aborts the drain: reload the config,
+ // rebuild the worker pool and fall through with whatever results arrived so far
+ // (the two previous catch blocks were identical, so they are merged into one)
+ pending = 0;
+ try (FileInputStream fis = new FileInputStream(fileName)) {
+ prop.load(fis);
+ } catch (FileNotFoundException ex) {
+ // keep the previously loaded properties
+ } catch (IOException ex) {
+ // keep the previously loaded properties
+ }
+ System.out.println(Arrays.toString(e.getStackTrace())); // println, not printf: the trace is not a format string
+ pool.shutdown();
+ pool = Executors.newFixedThreadPool(Integer.valueOf(prop.getProperty("app.thread_count")));
+ completionService = new ExecutorCompletionService(pool);
+ }
+ }
+
+ int cacheRequirement = 8500; // minimum best score before strF is considered worth persisting
+ if (preRelationUserCounters > cacheRequirement && !ues_copy.contains(strF) && filterContent(strF)) {
+ DataMapper.InsertMYSQLStrings(strF);
+ DataMapper.checkStringsToDelete();
+ }
+ Collections.reverse(concurrentRelations);
+ ArrayList mysqlUpdateLastUsed = new ArrayList();
+ Double aDouble = Double.valueOf(prop.getProperty("app.random_length"));
+ if (!concurrentRelations.isEmpty()) {
+ for (String secondaryRelation : concurrentRelations) {
+ // stop appending once the reply exceeds strF.length() * app.random_length characters
+ if (SB.toString().length() > strF.length() * aDouble && !SB.toString().isEmpty()) {
+ break;
+ }
+
+ ArrayList orDefault = strResponses.getOrDefault(strF, null);
+ boolean skip = false;
+ for (String strItr : orDefault) {
+ if (secondaryRelation.equalsIgnoreCase(strItr)) {
+ skip = true;
+ // reset heuristic for the per-message response history: once most candidates
+ // have been used, or with ~44% probability after five uses, clear the history
+ // so earlier relations become eligible again instead of starving the reply
+ if (orDefault.size() + 3 >= concurrentRelations.size()) {
+ orDefault = new ArrayList<>();
+ strResponses.put(strF, orDefault);
+ } else if (orDefault.size() > 5) {
+ double v = Math.random() * 10;
+ if (v > 5.6) {
+ orDefault = new ArrayList<>();
+ strResponses.put(strF, orDefault);
+ }
+ }
+ break;
+ }
+ }
+ if (skip) continue;
+ /*
+ if (!SB.isEmpty()) {
+ String testSTR = SB.toString() + " " + secondaryRelation;
+ SentimentAnalyzerTestDynamic SMX = getReponseFuturesHelper(strF, testSTR, stanfordCoreNLP, stanfordCoreNLPSentiment,
+ coreMaps1, strAnno, strAnnoSentiment, coreDocument, tokenizeCountingF, taggedWordListF
+ , typedDependenciesF, rnnCoreAnnotationsPredictedF, simpleMatricesF, simpleMatricesNodevectorsF
+ , listF, longestF, sentencesF, sentencesSentimentF, treesF, grammaticalStructuresF, sentimentLongestF
+ , imwesF, InflectedCounterNegativeF, InflectedCounterPositiveF, tokenEntryF, UnmarkedPatternCounterF
+ , strTokensIpartFormF, tokenFormsF, intTokenEntyCountsF, MarkedContinuousCounterF, ITokenTagsF
+ , strTokenEntryGetPOSF, retrieveTGWListF, PairCounterF, TokensCounterF, stopWordLemmaF, nerEntitiesF
+ , stopWordTokenF, entityTokenTagsF, nerEntitiesTypeF, AnotatorcounterF, strTokenStemsF);
+ double scoreRelationLastUserMsg = SMX.getScore();
+
+ if (preRelationUserCounters > scoreRelationLastUserMsg) {
+ break;
+ }
+
+ } */
+
+ SB.append(secondaryRelation).append(". ");
"); + mysqlUpdateLastUsed.add(secondaryRelation); + orDefault.add(secondaryRelation); + strResponses.put(strF, orDefault); + } + } + if (SB.toString().isEmpty()) { + return "failure, preventing stuckness"; + } + DataMapper.updateLastUsed(mysqlUpdateLastUsed); + return SB.toString(); + } + + private void getJMWEAnnotation(String str1) { + Annotation jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(str1); + jmweAnnotationCache.put(str1, jmweAnnotation); + } + + public String getResponseMsg(String str, String personName, StanfordCoreNLP stanfordCoreNLP, + StanfordCoreNLP stanfordCoreNLPSentiment, Boolean ingameResponse) { + String responseFutures = ""; + String strF = trimString(str); + //System.out.println("post trimstring(). strF: " + strF); + responseFutures = getResponseFutures(strF, stanfordCoreNLP, stanfordCoreNLPSentiment); + if (!ingameResponse) { + responseFutures = checkPersonPresentInSentence(personName, responseFutures, strF, stanfordCoreNLP, + stanfordCoreNLPSentiment); + } + return responseFutures; + } + + private String checkPersonPresentInSentence(String personName, String responseMsg, String userLastMessage, + StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) { + try { + CoreDocument pipelineCoreDcoument = new CoreDocument(responseMsg); + CoreDocument pipelineCoreDcoumentLastMsg = new CoreDocument(userLastMessage); + stanfordCoreNLP.annotate(pipelineCoreDcoument); + stanfordCoreNLPSentiment.annotate(pipelineCoreDcoumentLastMsg); + String regex = "(.*?\\d){10,}"; + if (pipelineCoreDcoument.entityMentions() != null) { + for (CoreEntityMention em : pipelineCoreDcoument.entityMentions()) { + String entityType = em.entityType(); + if (entityType == "PERSON") { + String str = responseMsg; + String emText = em.text(); + Pattern pattern = Pattern.compile(regex); + Matcher matcher = pattern.matcher(personName); + boolean isMatched = matcher.matches(); + if (emText != personName && !isMatched) { + if (pipelineCoreDcoumentLastMsg.entityMentions() != null) { + for (CoreEntityMention emLastMsg : pipelineCoreDcoumentLastMsg.entityMentions()) { + if (emText != emLastMsg.text() && !Character.isDigit(Integer.parseInt(emLastMsg.text().trim()))) { + str = (responseMsg.substring(0, responseMsg.indexOf(emText)) + " " + + emLastMsg + " " + responseMsg.substring(responseMsg.indexOf(emText))); + } + } + } + str += personName; + return str; + } + } + } + } + } catch (Exception e) { + System.out.println("SCUFFED JAYZ: " + e.getMessage()); + } + return responseMsg; + } + + public boolean filterContent(String str) { + if (!str.isEmpty() && str.length() > 3) { + String str1Local = str.trim(); + if (str1Local.length() > 2 && !str1Local.startsWith("!")) { + return true; + } + } + return false; + } + + public void getCoreDocumentsSuggested(StanfordCoreNLP pipeline, String str) { + Annotation annotation = new Annotation(str); + pipeline.annotate(annotation); + CoreDocument coreDocument = new CoreDocument(annotation); + coreDocumentAnnotationCache.put(str, coreDocument); + } +} diff --git a/src/main/java/FunctionLayer/LevenshteinDistance.java b/src/main/java/FunctionLayer/LevenshteinDistance.java new file mode 100644 index 0000000..6e753a8 --- /dev/null +++ b/src/main/java/FunctionLayer/LevenshteinDistance.java @@ -0,0 +1,43 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. 
+ */ +package FunctionLayer; + +/** + * + * @author install1 + */ +public class LevenshteinDistance { + private CharSequence lhs; + private CharSequence rhs; + + private static int minimum(int a, int b, int c) { + return Math.min(Math.min(a, b), c); + } + + public LevenshteinDistance(CharSequence lhs, CharSequence rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + public double computeLevenshteinDistance() { + int[][] distance = new int[lhs.length() + 1][rhs.length() + 1]; + for (int i = 0; i <= lhs.length(); i++) { + distance[i][0] = i; + } + for (int j = 1; j <= rhs.length(); j++) { + distance[0][j] = j; + } + for (int i = 1; i <= lhs.length(); i++) { + for (int j = 1; j <= rhs.length(); j++) { + distance[i][j] = minimum( + distance[i - 1][j] + 1, + distance[i][j - 1] + 1, + distance[i - 1][j - 1] + ((lhs.charAt(i - 1) == rhs.charAt(j - 1)) ? 0 : 1)); + } + } + return distance[lhs.length()][rhs.length()]; + } +} diff --git a/src/main/java/FunctionLayer/PipelineJMWESingleton.java b/src/main/java/FunctionLayer/PipelineJMWESingleton.java new file mode 100644 index 0000000..f592231 --- /dev/null +++ b/src/main/java/FunctionLayer/PipelineJMWESingleton.java @@ -0,0 +1,157 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package FunctionLayer; + +import edu.mit.jmwe.data.IMWE; +import edu.mit.jmwe.data.IToken; +import edu.mit.jmwe.data.Token; +import edu.mit.jmwe.detect.CompositeDetector; +import edu.mit.jmwe.detect.Consecutive; +import edu.mit.jmwe.detect.Exhaustive; +import edu.mit.jmwe.detect.IMWEDetector; +import edu.mit.jmwe.detect.InflectionPattern; +import edu.mit.jmwe.detect.MoreFrequentAsMWE; +import edu.mit.jmwe.detect.ProperNouns; +import edu.mit.jmwe.index.IMWEIndex; +import edu.mit.jmwe.index.MWEIndex; +import edu.stanford.nlp.ling.CoreAnnotations; +import edu.stanford.nlp.ling.CoreLabel; +import edu.stanford.nlp.ling.JMWEAnnotation; +import edu.stanford.nlp.pipeline.Annotation; +import edu.stanford.nlp.pipeline.StanfordCoreNLP; +import edu.stanford.nlp.util.CoreMap; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Properties; + +/** + * @author install1 + */ +//maybe not public? 
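+/*
+ * Usage sketch (added commentary, based on the methods below): the singleton must be
+ * initialized once before any annotation call, otherwise INSTANCE is still null:
+ *
+ *     PipelineJMWESingleton.getINSTANCE();   // builds the MWE index and detector once
+ *     Annotation anno = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation("take a look at this");
+ *     // each sentence in anno now carries a JMWEAnnotation with the detected multi-word expressions
+ */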
+public class PipelineJMWESingleton { + + //if not needed to be volatile dont make it, increases time + //public volatile static PipelineJMWESingleton INSTANCE; + public static PipelineJMWESingleton INSTANCE; + private static StanfordCoreNLP localNLP = initializeJMWE(); + private static String underscoreSpaceReplacement; + private static IMWEIndex index; + private static IMWEDetector detector; + + private PipelineJMWESingleton() { + String whoami = System.getProperty("user.name"); + String jmweIndexData = "/home/" + whoami + "/autism_bot_number_crunching/lib/mweindex_wordnet3.0_semcor1.6.data"; // ./lib/mweindex_wordnet3.0_semcor1.6.data + String jmweIndexDataLocalTest = "/mnt/hdd/home/christian/content/sourcemod_plugins_and_extensions/addons/sourcemod/scripting/gogs/ArtificialAutism/lib/mweindex_wordnet3.0_semcor1.6.data"; + File indexFile = null; + indexFile = new File((String) jmweIndexData); + index = new MWEIndex(indexFile); + try { + index.open(); + } catch (IOException e) { + indexFile = new File((String) jmweIndexDataLocalTest); + index = new MWEIndex(indexFile); + try { + index.open(); + } catch (IOException ex) { + throw new RuntimeException(ex); + } + } + detector = getDetector(index, "Exhaustive"); + //index.close(); + } + + public static void getINSTANCE() { + INSTANCE = new PipelineJMWESingleton(); + } + + public final Annotation getJMWEAnnotation(String str) { + try { + index.open(); + } catch (IOException e) { + throw new RuntimeException("unable to open IMWEIndex index: " + e + "\n"); + } + Annotation annoStr = new Annotation(str); + localNLP.annotate(annoStr); + Class sentencesAnnotationClass = CoreAnnotations.SentencesAnnotation.class; + for (CoreMap sentence : annoStr.get(sentencesAnnotationClass)) { + List> mwes = getjMWEInSentence(sentence, index, detector, false); + //annoStr.set(JMWEAnnotation.class, mwes); + sentence.set(JMWEAnnotation.class, mwes); + } + //index.close(); + return annoStr; + } + + public final static StanfordCoreNLP initializeJMWE() { + Properties propsJMWE; + propsJMWE = new Properties(); + propsJMWE.setProperty("annotators", "tokenize,ssplit,pos,lemma"); + propsJMWE.setProperty("tokenize.options", "untokenizable=firstKeep"); + propsJMWE.setProperty("threads", "1"); + propsJMWE.setProperty("pos.maxlen", "90"); + propsJMWE.setProperty("tokenize.maxlen", "90"); + propsJMWE.setProperty("ssplit.maxlen", "90"); + propsJMWE.setProperty("lemma.maxlen", "90"); + underscoreSpaceReplacement = "-"; + localNLP = new StanfordCoreNLP(propsJMWE); + System.out.println("finished JMWE constructor \n"); + return localNLP; + } + + public IMWEDetector getDetector(IMWEIndex index, String detector) { + IMWEDetector iMWEdetector = null; + switch (detector) { + case "Consecutive": + iMWEdetector = new Consecutive(index); + break; + case "Exhaustive": + iMWEdetector = new Exhaustive(index); + break; + case "ProperNouns": + iMWEdetector = ProperNouns.getInstance(); + break; + case "Complex": + iMWEdetector = new CompositeDetector(ProperNouns.getInstance(), + new MoreFrequentAsMWE(new InflectionPattern(new Consecutive(index)))); + break; + case "CompositeConsecutiveProperNouns": + iMWEdetector = new CompositeDetector(new Consecutive(index), ProperNouns.getInstance()); + break; + default: + throw new IllegalArgumentException("Invalid detector argument " + detector + + ", only \"Consecutive\", \"Exhaustive\", \"ProperNouns\", \"Complex\" or \"CompositeConsecutiveProperNouns\" are supported."); + } + return iMWEdetector; + } + + public List> getjMWEInSentence(CoreMap sentence, 
IMWEIndex index, IMWEDetector detector, + boolean verbose) { + List tokens = getITokens(sentence.get(CoreAnnotations.TokensAnnotation.class)); + List> mwes = detector.detect(tokens); + if (verbose) { + for (IMWE token : mwes) { + System.out.println("IMWE: " + token); + } + } + return mwes; + } + + public List getITokens(List tokens) { + return getITokens(tokens, underscoreSpaceReplacement); + } + + public List getITokens(List tokens, String underscoreSpaceReplacement) { + List sentence = new ArrayList(); + for (CoreLabel token : tokens) { + sentence.add(new Token(token.originalText().replaceAll("_", underscoreSpaceReplacement).replaceAll(" ", underscoreSpaceReplacement), token.get(CoreAnnotations.PartOfSpeechAnnotation.class), token.lemma().replaceAll("_", underscoreSpaceReplacement).replaceAll(" ", underscoreSpaceReplacement))); + } + return sentence; + } +} diff --git a/src/main/java/FunctionLayer/SimilarityMatrix.java b/src/main/java/FunctionLayer/SimilarityMatrix.java new file mode 100644 index 0000000..c304a3c --- /dev/null +++ b/src/main/java/FunctionLayer/SimilarityMatrix.java @@ -0,0 +1,32 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package FunctionLayer; + +/** + * @author install1 + */ +public class SimilarityMatrix { + + private String PrimaryString; + private String SecondaryString; + private double distance; + + public SimilarityMatrix(String str1, String str2) { + this.PrimaryString = str1; + this.SecondaryString = str2; + } + + public SimilarityMatrix(String str1, String str2, double result) { + this.PrimaryString = str1; + this.SecondaryString = str2; + this.distance = result; + } + + public final String getSecondaryString() { + return SecondaryString; + } + +} diff --git a/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTestDynamicTesting.java b/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTestDynamicTesting.java new file mode 100644 index 0000000..765ae89 --- /dev/null +++ b/src/main/java/FunctionLayer/StanfordParser/SentimentAnalyzerTestDynamicTesting.java @@ -0,0 +1,3096 @@ +package FunctionLayer.StanfordParser; + +import FunctionLayer.LevenshteinDistance; +import FunctionLayer.StopwordAnnotator; +import edu.mit.jmwe.data.IMWE; +import edu.mit.jmwe.data.IMWEDesc; +import edu.mit.jmwe.data.IToken; +import edu.stanford.nlp.ie.AbstractSequenceClassifier; +import edu.stanford.nlp.ling.*; +import edu.stanford.nlp.neural.rnn.RNNCoreAnnotations; +import edu.stanford.nlp.pipeline.Annotation; +import edu.stanford.nlp.pipeline.CoreDocument; +import edu.stanford.nlp.pipeline.CoreEntityMention; +import edu.stanford.nlp.process.CoreLabelTokenFactory; +import edu.stanford.nlp.process.DocumentPreprocessor; +import edu.stanford.nlp.process.PTBTokenizer; +import edu.stanford.nlp.process.TokenizerFactory; +import edu.stanford.nlp.sentiment.SentimentCoreAnnotations; +import edu.stanford.nlp.sequences.DocumentReaderAndWriter; +import edu.stanford.nlp.tagger.maxent.MaxentTagger; +import edu.stanford.nlp.trees.*; +import edu.stanford.nlp.trees.tregex.gui.Tdiff; +import edu.stanford.nlp.util.CoreMap; +import edu.stanford.nlp.util.Pair; +import org.apache.lucene.analysis.core.StopAnalyzer; +import org.ejml.data.DMatrixIterator; +import org.ejml.simple.SimpleMatrix; +import org.json.simple.JSONObject; + +import java.io.StringReader; +import java.util.*; +import java.util.logging.FileHandler; + +/* + * To change this 
license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ + +/** + * @author install1 + */ +public class SentimentAnalyzerTestDynamicTesting { + + private JSONObject data; + private String str; + private String str1; + private MaxentTagger tagger; + private GrammaticalStructureFactory gsf; + private AbstractSequenceClassifier classifier; + private List coreMaps1; + private List coreMaps2; + private Annotation pipelineAnnotation1; + private Annotation pipelineAnnotation2; + private Annotation pipelineAnnotation1Sentiment; + private Annotation pipelineAnnotation2Sentiment; + private CoreDocument pipelineCoreDcoument1; + private CoreDocument pipelineCoreDcoument2; + //private loggerlogger =logger.getLogger("autismlog"); + private FileHandler fh; + + public Integer getTokenizeCounting() { + return tokenizeCounting; + } + + public List> getTaggedWordListF() { + return taggedWordListF; + } + + public List> getTaggedWordList1() { + return taggedWordList1; + } + + public ArrayList getRetrieveTGWList1() { + return retrieveTGWList1; + } + + public List getSentencesF() { + return sentencesF; + } + + public Integer getTokenizeCountingF() { + return tokenizeCountingF; + } + + public ArrayList getRetrieveTGWListF() { + return retrieveTGWListF; + } + + public List getSentences1() { + return sentences1; + } + + public List getSentencesSentimentF() { + return sentencesSentimentF; + } + + public List getSentencesSentiment1() { + return sentencesSentiment1; + } + + public ArrayList getTreesF() { + return treesF; + } + + public ArrayList getTrees1() { + return trees1; + } + + + public ArrayList getGrammaticalStructuresF() { + return grammaticalStructuresF; + } + + public ArrayList getGrammaticalStructures1() { + return grammaticalStructures1; + } + + public ArrayList getTypedDependenciesF() { + return typedDependenciesF; + } + + public ArrayList getTypedDependencies1() { + return typedDependencies1; + } + + public ArrayList getRnnCoreAnnotationsPredictedF() { + return rnnCoreAnnotationsPredictedF; + } + + public ArrayList getRnnCoreAnnotationsPredicted1() { + return rnnCoreAnnotationsPredicted1; + } + + public ArrayList getSimpleMatricesF() { + return simpleMatricesF; + } + + public ArrayList getSimpleMatrices1() { + return simpleMatrices1; + } + + public ArrayList getSimpleMatricesNodevectorsF() { + return simpleMatricesNodevectorsF; + } + + public List getListF() { + return listF; + } + + public List getList1() { + return list1; + } + + public ArrayList getSimpleMatricesNodevectors1() { + return simpleMatricesNodevectors1; + } + + public Integer getLongestF() { + return longestF; + } + + public Integer getSentimentLongestF() { + return sentimentLongestF; + } + + public Integer getSentimentLongest1() { + return sentimentLongest1; + } + + public List> getImwesF() { + return imwesF; + } + + public List> getImwes1() { + return imwes1; + } + + public Integer getLongest1() { + return longest1; + } + + public Integer getInflectedCounterNegativeF() { + return InflectedCounterNegativeF; + } + + public Integer getInflectedCounterPositiveF() { + return InflectedCounterPositiveF; + } + + public Integer getInflectedCounterPositive1() { + return InflectedCounterPositive1; + } + + public Integer getInflectedCounterNegative1() { + return InflectedCounterNegative1; + } + + public ArrayList getTokenEntryF() { + return tokenEntryF; + } + + public ArrayList getTokenEntry1() { + return tokenEntry1; + } + + public Integer 
getMarkedContinuousCounterF() { + return MarkedContinuousCounterF; + } + + public Integer getMarkedContinuousCounter1() { + return MarkedContinuousCounter1; + } + + public Integer getUnmarkedPatternCounterF() { + return UnmarkedPatternCounterF; + } + + public Integer getUnmarkedPatternCounter1() { + return UnmarkedPatternCounter1; + } + + public ArrayList getStrTokensIpartFormF() { + return strTokensIpartFormF; + } + + public ArrayList getStrTokensIpartForm1() { + return strTokensIpartForm1; + } + + public ArrayList getTokenFormsF() { + return tokenFormsF; + } + + public ArrayList getTokenForms1() { + return tokenForms1; + } + + public ArrayList getStrTokenEntryGetPOSF() { + return strTokenEntryGetPOSF; + } + + public ArrayList getStrTokenEntryGetPOS1() { + return strTokenEntryGetPOS1; + } + + public ArrayList getIntTokenEntyCountsF() { + return intTokenEntyCountsF; + } + + public ArrayList getIntTokenEntyCounts1() { + return intTokenEntyCounts1; + } + + public ArrayList getITokenTagsF() { + return ITokenTagsF; + } + + public ArrayList getITokenTags1() { + return ITokenTags1; + } + + public ArrayList getStrTokenStemsF() { + return strTokenStemsF; + } + + public ArrayList getStrTokenStems1() { + return strTokenStems1; + } + + public Integer getAnotatorcounterF() { + return AnotatorcounterF; + } + + public Integer getAnotatorcounter1() { + return Anotatorcounter1; + } + + public Integer getTokensCounterF() { + return TokensCounterF; + } + + public Integer getTokensCounter1() { + return TokensCounter1; + } + + public ArrayList getEntityTokenTagsF() { + return entityTokenTagsF; + } + + public ArrayList getEntityTokenTags1() { + return entityTokenTags1; + } + + public ArrayList getNerEntitiesF() { + return nerEntitiesF; + } + + public ArrayList getNerEntities1() { + return nerEntities1; + } + + public ArrayList getNerEntitiesTypeF() { + return nerEntitiesTypeF; + } + + public ArrayList getNerEntitiesType1() { + return nerEntitiesType1; + } + + public ArrayList getStopWordTokenF() { + return stopWordTokenF; + } + + public ArrayList getStopWordToken1() { + return stopWordToken1; + } + + public ArrayList getStopWordLemmaF() { + return stopWordLemmaF; + } + + public ArrayList getStopWordLemma1() { + return stopWordLemma1; + } + + public Integer getPairCounterF() { + return PairCounterF; + } + + public Integer getPairCounter1() { + return PairCounter1; + } + + //caches + private Integer tokenizeCounting; + private Integer tokenizeCountingF; + private List> taggedWordListF; + private List> taggedWordList1; + private ArrayList retrieveTGWList1; + private ArrayList retrieveTGWListF; + private List sentencesF; + private List sentences1; + private List sentencesSentimentF; + private List sentencesSentiment1; + private ArrayList treesF; + private ArrayList trees1; + private ArrayList grammaticalStructuresF; + private ArrayList grammaticalStructures1; + private ArrayList typedDependenciesF; + private ArrayList typedDependencies1; + private ArrayList rnnCoreAnnotationsPredictedF; + private ArrayList rnnCoreAnnotationsPredicted1; + private ArrayList simpleMatricesF; + private ArrayList simpleMatrices1; + private ArrayList simpleMatricesNodevectorsF; + private ArrayList simpleMatricesNodevectors1; + private List listF; + private List list1; + private Integer longestF; + private Integer longest1; + private Integer sentimentLongestF; + private Integer sentimentLongest1; + private List> imwesF; + private List> imwes1; + private Integer InflectedCounterNegativeF; + private Integer InflectedCounterNegative1; + 
private Integer InflectedCounterPositiveF; + private Integer InflectedCounterPositive1; + private ArrayList tokenEntryF; + private ArrayList tokenEntry1; + private Integer MarkedContinuousCounterF; + private Integer MarkedContinuousCounter1; + private Integer UnmarkedPatternCounterF; + private Integer UnmarkedPatternCounter1; + private ArrayList strTokensIpartFormF; + private ArrayList strTokensIpartForm1; + private ArrayList tokenFormsF; + private ArrayList tokenForms1; + private ArrayList strTokenEntryGetPOSF; + private ArrayList strTokenEntryGetPOS1; + private ArrayList intTokenEntyCountsF; + private ArrayList intTokenEntyCounts1; + private ArrayList ITokenTagsF; + private ArrayList ITokenTags1; + private ArrayList strTokenStemsF; + private ArrayList strTokenStems1; + private Integer AnotatorcounterF; + private Integer Anotatorcounter1; + private Integer TokensCounterF; + private Integer TokensCounter1; + private ArrayList entityTokenTagsF; + private ArrayList entityTokenTags1; + private ArrayList nerEntitiesF; + private ArrayList nerEntities1; + private ArrayList nerEntitiesTypeF; + private ArrayList nerEntitiesType1; + private ArrayList stopWordTokenF; + private ArrayList stopWordToken1; + private ArrayList stopWordLemmaF; + private ArrayList stopWordLemma1; + private Integer PairCounterF; + private Integer PairCounter1; + private Double score_res; + + public Double getScore(){ + return score_res; + } + + public String getSecondaryString(){ + return this.str1; + } + + public SentimentAnalyzerTestDynamicTesting(String str, String str1, List coreMaps1, List coreMaps2, + Annotation strPipeline1, Annotation strPipeline2, Annotation strPipeSentiment1, Annotation strPipeSentiment2, + CoreDocument pipelineCoreDcoument1, CoreDocument pipelineCoreDcoument2, + MaxentTagger tagger, GrammaticalStructureFactory gsf, + AbstractSequenceClassifier classifier, Integer tokenizeCounting, + Integer tokenizeCountingF, List> taggedWordListF, + List> taggedWordList1, ArrayList + retrieveTGWListF, ArrayList retrieveTGWList1, + List sentencesF, List sentences1, + List sentencesSentimentF, List sentencesSentiment1, + ArrayList treesF, ArrayList trees1, + ArrayList grammaticalStructuresF, + ArrayList grammaticalStructures1, + ArrayList typedDependenciesF, + ArrayList typedDependencies1, + ArrayList rnnCoreAnnotationsPredictedF, + ArrayList rnnCoreAnnotationsPredicted1, + ArrayList simpleMatricesF, + ArrayList simpleMatrices1, + ArrayList simpleMatricesNodevectorsF, + ArrayList simpleMatricesNodevectors1, + List listF, List list1, Integer longestF, Integer longest1, + Integer sentimentLongestF, Integer sentimentLongest1, + List> imwesF, List> imwes1, + Integer InflectedCounterNegativeF, + Integer InflectedCounterNegative1, Integer InflectedCounterPositiveF, + Integer InflectedCounterPositive1, ArrayList tokenEntryF, + ArrayList tokenEntry1, Integer MarkedContinuousCounterF, + Integer MarkedContinuousCounter1, Integer UnmarkedPatternCounterF, + Integer UnmarkedPatternCounter1, ArrayList strTokensIpartFormF, + ArrayList strTokensIpartForm1, ArrayList tokenFormsF, + ArrayList tokenForms1, ArrayList strTokenEntryGetPOSF, + ArrayList strTokenEntryGetPOS1, ArrayList intTokenEntyCountsF, + ArrayList intTokenEntyCounts1, ArrayList ITokenTagsF, + ArrayList ITokenTags1, ArrayList strTokenStemsF, + ArrayList strTokenStems1, Integer AnotatorcounterF, + Integer Anotatorcounter1, Integer TokensCounterF, + Integer TokensCounter1, ArrayList entityTokenTagsF, + ArrayList entityTokenTags1, ArrayList nerEntitiesF, + ArrayList 
nerEntities1, ArrayList nerEntitiesTypeF, + ArrayList nerEntitiesType1, ArrayList stopWordTokenF, + ArrayList stopWordToken1, ArrayList stopWordLemmaF, + ArrayList stopWordLemma1, Integer PairCounterF, + Integer PairCounter1, JSONObject data, boolean testingFunction) { + this.str = str; + this.str1 = str1; + this.tagger = tagger; + this.gsf = gsf; + this.classifier = classifier; + this.coreMaps1 = coreMaps1; + this.coreMaps2 = coreMaps2; + this.pipelineAnnotation1 = strPipeline1; + this.pipelineAnnotation2 = strPipeline2; + this.pipelineAnnotation1Sentiment = strPipeSentiment1; + this.pipelineAnnotation2Sentiment = strPipeSentiment2; + this.pipelineCoreDcoument1 = pipelineCoreDcoument1; + this.pipelineCoreDcoument2 = pipelineCoreDcoument2; + this.tokenizeCounting = tokenizeCounting; + this.tokenizeCountingF = tokenizeCountingF; + this.taggedWordListF = taggedWordListF; + this.taggedWordList1 = taggedWordList1; + this.retrieveTGWListF = retrieveTGWListF; + this.retrieveTGWList1 = retrieveTGWList1; + this.sentencesF = sentencesF; + this.sentences1 = sentences1; + this.sentencesSentimentF = sentencesSentimentF; + this.sentencesSentiment1 = sentencesSentiment1; + this.treesF = treesF; + this.trees1 = trees1; + this.grammaticalStructuresF = grammaticalStructuresF; + this.grammaticalStructures1 = grammaticalStructures1; + this.typedDependenciesF = typedDependenciesF; + this.typedDependencies1 = typedDependencies1; + this.rnnCoreAnnotationsPredictedF = rnnCoreAnnotationsPredictedF; + this.rnnCoreAnnotationsPredicted1 = rnnCoreAnnotationsPredicted1; + this.simpleMatricesF = simpleMatricesF; + this.simpleMatrices1 = simpleMatrices1; + this.simpleMatricesNodevectorsF = simpleMatricesNodevectorsF; + this.simpleMatricesNodevectors1 = simpleMatricesNodevectors1; + this.listF = listF; + this.list1 = list1; + this.longestF = longestF; + this.longest1 = longest1; + this.sentimentLongestF = sentimentLongestF; + this.sentimentLongest1 = sentimentLongest1; + this.imwesF = imwesF; + this.imwes1 = imwes1; + this.InflectedCounterNegativeF = InflectedCounterNegativeF; + this.InflectedCounterNegative1 = InflectedCounterNegative1; + this.InflectedCounterPositiveF = InflectedCounterPositiveF; + this.InflectedCounterPositive1 = InflectedCounterPositive1; + this.tokenEntryF = tokenEntryF; + this.tokenEntry1 = tokenEntry1; + this.MarkedContinuousCounterF = MarkedContinuousCounterF; + this.MarkedContinuousCounter1 = MarkedContinuousCounter1; + this.UnmarkedPatternCounterF = UnmarkedPatternCounterF; + this.UnmarkedPatternCounter1 = UnmarkedPatternCounter1; + this.strTokensIpartFormF = strTokensIpartFormF; + this.strTokensIpartForm1 = strTokensIpartForm1; + this.tokenFormsF = tokenFormsF; + this.tokenForms1 = tokenForms1; + this.strTokenEntryGetPOSF = strTokenEntryGetPOSF; + this.strTokenEntryGetPOS1 = strTokenEntryGetPOS1; + this.intTokenEntyCountsF = intTokenEntyCountsF; + this.intTokenEntyCounts1 = intTokenEntyCounts1; + this.ITokenTagsF = ITokenTagsF; + this.ITokenTags1 = ITokenTags1; + this.strTokenStemsF = strTokenStemsF; + this.strTokenStems1 = strTokenStems1; + this.AnotatorcounterF = AnotatorcounterF; + this.Anotatorcounter1 = Anotatorcounter1; + this.TokensCounterF = TokensCounterF; + this.TokensCounter1 = TokensCounter1; + this.entityTokenTagsF = entityTokenTagsF; + this.entityTokenTags1 = entityTokenTags1; + this.nerEntitiesF = nerEntitiesF; + this.nerEntities1 = nerEntities1; + this.nerEntitiesTypeF = nerEntitiesTypeF; + this.nerEntitiesType1 = nerEntitiesType1; + this.stopWordTokenF = stopWordTokenF; + 
this.stopWordToken1 = stopWordToken1;
+ this.stopWordLemmaF = stopWordLemmaF;
+ this.stopWordLemma1 = stopWordLemma1;
+ this.PairCounterF = PairCounterF;
+ this.PairCounter1 = PairCounter1;
+ this.data = data;
+ if (testingFunction) {
+ // testing mode: only validate the string caches instead of computing a score via callSMX()
+ validateStringCaches();
+ } else {
+ this.score_res = callSMX();
+ }
+ }
+
+ private List> getTaggedWordList(String message) {
+ List> taggedwordlist = new ArrayList();
+ DocumentPreprocessor tokenizer = new DocumentPreprocessor(new StringReader(message));
+ TokenizerFactory ptbTokenizerFactory = PTBTokenizer.factory(new CoreLabelTokenFactory(), "untokenizable=noneDelete"); //noneDelete //firstDelete
+ tokenizer.setTokenizerFactory(ptbTokenizerFactory);
+ for (final List sentence : tokenizer) {
+ try {
+ taggedwordlist.add(tagger.tagSentence(sentence));
+ } catch (Exception ex) {
+ System.out.println("tagger.tagSentence failed; skipping sentence");
+ }
+ }
+ return taggedwordlist;
+ }
+
+ private int tokenizeCounting(List> taggedwordlist) {
+ int counter = 0;
+ for (List taggedList : taggedwordlist) {
+ counter += taggedList.size();
+ }
+ return counter;
+ }
+
+ private ArrayList retrieveTGWListIndex(List> taggedwordlist) {
+ // collect each distinct POS tag once, skipping ":". The previous version iterated the
+ // (initially empty) result list while adding to it, so it never collected anything and
+ // risked a ConcurrentModificationException; this rewrite keeps the apparent intent.
+ ArrayList tgwlistIndex = new ArrayList();
+ for (List tGWList : taggedwordlist) {
+ for (TaggedWord taggedWord : tGWList) {
+ if (!tgwlistIndex.contains(taggedWord.tag()) && !taggedWord.tag().equals(":")) {
+ tgwlistIndex.add(taggedWord.tag());
+ }
+ }
+ }
+ return tgwlistIndex;
+ }
+
+ private Double iterateTrees(ArrayList sentenceConstituencyParseList2, ArrayList sentenceConstituencyParseList1,
+ Double score) {
+ long param1 = (long) data.get("iterateTrees_param1");
+ long param2 = (long) data.get("iterateTrees_param2");
+ long param3 = (long) data.get("iterateTrees_param3");
+ long param4 = (long) data.get("iterateTrees_param4");
+ long param5 = (long) data.get("iterateTrees_param5");
+ long param6 = (long) data.get("iterateTrees_param6");
+ long param7 = (long) data.get("iterateTrees_param7");
+ long param8 = (long) data.get("iterateTrees_param8");
+ long param9 = (long) data.get("iterateTrees_param9");
+ long param10 = (long) data.get("iterateTrees_param10");
+ Double param11 = (Double) data.get("iterateTreesDouble_param11");
+ long param12 = (long) data.get("iterateTrees_param12");
+ long param13 = (long) data.get("iterateTrees_param13");
+ long param14 = (long) data.get("iterateTrees_param14");
+ long param15 = (long) data.get("iterateTrees_param15");
+ long param16 = (long) data.get("iterateTrees_param16");
+ long param17 = (long) data.get("iterateTrees_param17");
+ long param18 = (long) data.get("iterateTrees_param18");
+ long param19 = (long) data.get("iterateTrees_param19");
+ Double param20 = (Double) data.get("iterateTreesDouble_param20");
+ long param21 = (long) data.get("iterateTrees_param21");
+ long param22 = (long) data.get("iterateTrees_param22");
+ long param23 = (long) data.get("iterateTrees_param23");
+ long param24 = (long) data.get("iterateTrees_param24");
+ long param25 = (long) data.get("iterateTrees_param25");
+ long param26 = (long) data.get("iterateTrees_param26");
+ long param27 = (long) data.get("iterateTrees_param27");
+ long param28 = (long) data.get("iterateTrees_param28");
+ long param29 = (long) data.get("iterateTrees_param29");
+ long param30 = (long) data.get("iterateTrees_param30");
+ long param31 = (long) data.get("iterateTrees_param31");
+
+ double preConstituentsScore = score;
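+ /*
+ * Added commentary: the iterateTrees_param* values come from the JSONObject handed to the
+ * constructor (presumably parsed from dynamicScore.json). json-simple boxes JSON numbers
+ * as Long or Double, hence the (long)/(Double) casts above; a missing key surfaces here as
+ * a NullPointerException on unboxing rather than a default value.
+ */
+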
ArrayList constituentsMap = new ArrayList(); + int constituencySize = sentenceConstituencyParseList1.size() + sentenceConstituencyParseList2.size(); + for (final Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2) { + int constiRelationsize = 0; + try { + if (sentenceConstituencyParse2 != null && !sentenceConstituencyParse2.isEmpty()) { + for (final Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1) { + try { + if (sentenceConstituencyParse1 != null && !sentenceConstituencyParse1.isEmpty()) { + + Set constinuent1 = null; + Set constinuent2 = null; + try { + constinuent1 = Tdiff.markDiff(sentenceConstituencyParse1, sentenceConstituencyParse2); + constinuent2 = Tdiff.markDiff(sentenceConstituencyParse2, sentenceConstituencyParse1); + } catch (Exception e) { + continue; + } + ArrayList constiLabels = new ArrayList(); + for (final Constituent consti : constinuent1) { + for (final Constituent consti1 : constinuent2) { + if (consti.value().equals(consti1.value()) && !constiLabels.contains(consti.value())) { + constiLabels.add(consti.value()); + constiRelationsize++; + } + } + } + int constituents1 = constinuent1.size() - constiRelationsize; + int constituents2 = constinuent2.size() - constiRelationsize; + constituentsMap.add(constituents1); + constituentsMap.add(constituents2); + constituentsMap.add(constiRelationsize); + if (constituentsMap.size() < param1) { + if ((constituents1 * param2 < constituents2 || constituents2 * param2 < constituents1) && constituents1 > param3 && constituents2 > param3) { + score -= (constituents1 + constituents2) * param4; + //logger.info("score post score -= (constituents1 + constituents2) * 200;: " + score); + } else if ((constituents1 == param5 || constituents2 == param5) && (constituents1 >= constituents2 + param6 || constituents2 >= constituents1 + param6)) { + score -= constituents1 > constituents2 ? constituents1 * param7 : (long) constituents2 * param7; + //logger.info("score post score -= constituents1 > constituents2 ? 
constituents1 * 500 : constituents2 * 500;: " + score); + } else if (constiRelationsize >= constituents1 + constituents2 && (constituents1 > param8 && constituents2 > param8)) { + score += (constiRelationsize + constituents1 + constituents2) * param9; + //logger.info("score post score += (constiRelationsize + constituents1 + constituents2) * 350;: " + score); + } else if (constituents1 >= param10 && constituents2 >= param10 && constituents1 * param11 > constituents2 && constituents2 * param11 > constituents1) { + //logger.info("constituents1: " + constituents1); + //logger.info("constituents2: " + constituents2); + if (constituents2 == param12) { + score += param13; + //logger.info("score post score += 8745;: " + score); + } + if (constituents1 == constituents2 && constiRelationsize - constituents1 >= param14) { + score += (constiRelationsize + constituents1 + constituents2) * param15; + //logger.info("score post score += (constiRelationsize + constituents1 + constituents2) * 1550;: " + score); + } else if (constituents1 == param16 && constituents2 == param16) { + score -= param17; + //logger.info("score post score -= 3949;: " + score); + } else if (constiRelationsize >= constituents1 && constituents1 == constituents2) { + score -= (constiRelationsize + constituents1 + constituents2) * param18; + //logger.info("score post score -= (constiRelationsize + constituents1 + constituents2) * 550;: " + score); + } else if (constiRelationsize < constituents1 && constiRelationsize < constituents2) { + score += param19; + //logger.info("score post score += 800;: " + score); + } else if ((constiRelationsize == constituents1 || constiRelationsize == constituents2) && constituents1 * param20 > constituents2 + && constituents2 * param20 > constituents1) { + score += (constiRelationsize + constituents1 + constituents2) * param21; + //logger.info("score post score += (constiRelationsize + constituents1 + constituents2) * 350;: " + score); + } + } else if (constiRelationsize > constituents1 + constituents2) { + //logger.info("constiRelationsize: " + constiRelationsize); + //logger.info("constituents1: " + constituents1); + //logger.info("constituents2: " + constituents2); + score -= param22; + //logger.info("score score score -= 2826; " + score); + } else if (constiRelationsize * param23 < constituents1 || constiRelationsize * param23 < constituents2) { + score -= (constituents1 + constituents2) * param24; + //logger.info("score post score -= (constituents1 + constituents2) * 1923;: " + score); + } + } else { + //score = preConstituentsScore; + //logger.info("score post score = preConstituentsScore;: " + score); + int n1 = constituentsMap.get(0); + int n2 = constituentsMap.get(1); + int n3 = constituentsMap.get(2); + int cap = 0; + if (n1 > n2 && n1 > n3) { + cap = n1; + } else if (n2 > n3 && n2 > n1) { + cap = n2; + } else { + cap = n3; + } + int overheat = 0; + for (int iterator = 3; iterator < constituentsMap.size(); iterator++) { + Integer getConstituent = constituentsMap.get(iterator); + if (getConstituent > cap) { + overheat++; + } + } + //logger.info("cap: " + cap); + //logger.info("overheat: " + overheat); + if (overheat == param25) { + score -= param26; + //logger.info("score post score -= 12985;: " + score); + } + if (overheat >= param27) { + score -= overheat * param28; + //logger.info("score post score -= overheat * 2803;: " + score); + } else { + score -= param29; + //logger.info("score post score -= 553; " + score); + } + } + } + } catch (NoSuchElementException e) { + } + } + if 
(constituencySize > param30) {
+                        score -= constituencySize * param31;
+                        //logger.info("score post score -= constituencySize * 400;: " + score);
+                    }
+                }
+            } catch (NoSuchElementException e) {
+                // sentences whose parse cannot be inspected are skipped and contribute no score
+            }
+        }
+        return score;
+    }
+
+    private Double typeDependenciesGrammaticalRelation
+            (Collection<TypedDependency> allTypedDependencies1, Collection<TypedDependency> allTypedDependencies2,
+             Double score, ArrayList<GrammaticalStructure> grammaticalMap1,
+             ArrayList<GrammaticalStructure> grammaticalMap2,
+             ArrayList<Tree> sentenceConstituencyParseList1, ArrayList<Tree> sentenceConstituencyParseList2) {
+        long param1 = (long) data.get("typeDependenciesGrammaticalRelation_param1");
+        long param2 = (long) data.get("typeDependenciesGrammaticalRelation_param2");
+        long param3 = (long) data.get("typeDependenciesGrammaticalRelation_param3");
+        long param4 = (long) data.get("typeDependenciesGrammaticalRelation_param4");
+        long param5 = (long) data.get("typeDependenciesGrammaticalRelation_param5");
+        long param6 = (long) data.get("typeDependenciesGrammaticalRelation_param6");
+        long param7 = (long) data.get("typeDependenciesGrammaticalRelation_param7");
+        long param8 = (long) data.get("typeDependenciesGrammaticalRelation_param8");
+        long param9 = (long) data.get("typeDependenciesGrammaticalRelation_param9");
+        long param10 = (long) data.get("typeDependenciesGrammaticalRelation_param10");
+        long param11 = (long) data.get("typeDependenciesGrammaticalRelation_param11");
+        long param12 = (long) data.get("typeDependenciesGrammaticalRelation_param12");
+        // param13 is a floating-point threshold, hence the separate "...Double_param13" key
+        Double param13 = (Double) data.get("typeDependenciesGrammaticalRelationDouble_param13");
+        long param14 = (long) data.get("typeDependenciesGrammaticalRelation_param14");
+        long param15 = (long) data.get("typeDependenciesGrammaticalRelation_param15");
+        long param16 = (long) data.get("typeDependenciesGrammaticalRelation_param16");
+        long param17 = (long) data.get("typeDependenciesGrammaticalRelation_param17");
+        long param18 = (long) data.get("typeDependenciesGrammaticalRelation_param18");
+        long param19 = (long) data.get("typeDependenciesGrammaticalRelation_param19");
+        long param20 = (long) data.get("typeDependenciesGrammaticalRelation_param20");
+        long param21 = (long) data.get("typeDependenciesGrammaticalRelation_param21");
+        long param22 = (long) data.get("typeDependenciesGrammaticalRelation_param22");
+        long param23 = (long) data.get("typeDependenciesGrammaticalRelation_param23");
+        long param24 = (long) data.get("typeDependenciesGrammaticalRelation_param24");
+        long param25 = (long) data.get("typeDependenciesGrammaticalRelation_param25");
+        long param26 = (long) data.get("typeDependenciesGrammaticalRelation_param26");
+        long param27 = (long) data.get("typeDependenciesGrammaticalRelation_param27");
+        long param28 = (long) data.get("typeDependenciesGrammaticalRelation_param28");
+        long param29 = (long) data.get("typeDependenciesGrammaticalRelation_param29");
+        long param30 = (long) data.get("typeDependenciesGrammaticalRelation_param30");
+        long param31 = (long) data.get("typeDependenciesGrammaticalRelation_param31");
+        long param32 = (long) data.get("typeDependenciesGrammaticalRelation_param32");
+        long param33 = (long) data.get("typeDependenciesGrammaticalRelation_param33");
+        long param34 = (long) data.get("typeDependenciesGrammaticalRelation_param34");
+        long param35 = (long) data.get("typeDependenciesGrammaticalRelation_param35");
+        long param36 = (long) data.get("typeDependenciesGrammaticalRelation_param36");
+        long param37 = (long) data.get("typeDependenciesGrammaticalRelation_param37");
+        long param38 = (long) data.get("typeDependenciesGrammaticalRelation_param38");
+        long param39 = (long) data.get("typeDependenciesGrammaticalRelation_param39");
+        long param40 = (long) data.get("typeDependenciesGrammaticalRelation_param40");
+        long param41 = (long) data.get("typeDependenciesGrammaticalRelation_param41");
+        long param42 = (long) data.get("typeDependenciesGrammaticalRelation_param42");
+        long param43 = (long) data.get("typeDependenciesGrammaticalRelation_param43");
+        long param44 = (long) data.get("typeDependenciesGrammaticalRelation_param44");
+        long param45 = (long) data.get("typeDependenciesGrammaticalRelation_param45");
+        long param46 = (long) data.get("typeDependenciesGrammaticalRelation_param46");
+        long param47 = (long) data.get("typeDependenciesGrammaticalRelation_param47");
+        long param48 = (long) data.get("typeDependenciesGrammaticalRelation_param48");
+        long param49 = (long) data.get("typeDependenciesGrammaticalRelation_param49");
+        long param50 = (long) data.get("typeDependenciesGrammaticalRelation_param50");
+        long param51 = (long) data.get("typeDependenciesGrammaticalRelation_param51");
+        long param52 = (long) data.get("typeDependenciesGrammaticalRelation_param52");
+        long param53 = (long) data.get("typeDependenciesGrammaticalRelation_param53");
+        long param54 = (long) data.get("typeDependenciesGrammaticalRelation_param54");
+        long param55 = (long) data.get("typeDependenciesGrammaticalRelation_param55");
+        long param56 = (long) data.get("typeDependenciesGrammaticalRelation_param56");
+        long param57 = (long) data.get("typeDependenciesGrammaticalRelation_param57");
+        long param58 = (long) data.get("typeDependenciesGrammaticalRelation_param58");
+        long param59 = (long) data.get("typeDependenciesGrammaticalRelation_param59");
+
+        ArrayList<Integer> alltypeDepsSize1 = new ArrayList<>();
+        ArrayList<Integer> summationList = new ArrayList<>();
+        int relationApplicable1 = 0;
+        int relationApplicable2 = 0;
+        int grammaticalRelation1 = 0;
+        int grammaticalRelation2 = 0;
+        List<Tree> treeCollectionGramatical = new ArrayList<>();
+        List<Tree> treeCollectionReln = new ArrayList<>();
+        for (TypedDependency TDY1 : allTypedDependencies1) {
+            IndexedWord dep = TDY1.dep();
+            IndexedWord gov = TDY1.gov();
+            for (GrammaticalStructure gs : grammaticalMap1) {
+                GrammaticalRelation grammaticalRelation = gs.getGrammaticalRelation(gov, dep);
+                for (Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2) {
+                    try {
+                        if (sentenceConstituencyParse2 != null && !sentenceConstituencyParse2.isEmpty()) {
+                            if (grammaticalRelation.isApplicable(sentenceConstituencyParse2) && !treeCollectionGramatical.contains(sentenceConstituencyParse2)) {
+                                score += param1;
+                                //logger.info("score post score += 701; :" + score);
+                                grammaticalRelation1++;
+                                treeCollectionGramatical.add(sentenceConstituencyParse2);
+                            }
+                            GrammaticalRelation reln = TDY1.reln();
+                            if (reln.isApplicable(sentenceConstituencyParse2) && !treeCollectionReln.contains(sentenceConstituencyParse2)) {
+                                score += param2;
+                                //logger.info("score post score += 528; :" + score);
+                                relationApplicable1++;
+                                treeCollectionReln.add(sentenceConstituencyParse2);
+                            }
+                        }
+                    } catch (NoSuchElementException e) {
+                        // trees that cannot be inspected are simply not scored
+                    }
+                }
+            }
+        }
+        treeCollectionGramatical = new ArrayList<>();
+        treeCollectionReln = new ArrayList<>();
+        for (TypedDependency TDY : allTypedDependencies2) {
+            IndexedWord dep = TDY.dep();
+            IndexedWord gov = TDY.gov();
+            for (GrammaticalStructure gs : grammaticalMap2) {
+                GrammaticalRelation grammaticalRelation = gs.getGrammaticalRelation(gov, dep);
+                for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1) {
+                    try {
+                        if (sentenceConstituencyParse1 != null && !sentenceConstituencyParse1.isEmpty()) {
+                            if (grammaticalRelation.isApplicable(sentenceConstituencyParse1) && !treeCollectionGramatical.contains(sentenceConstituencyParse1)) {
+                                score += param3;
+                                //logger.info("score post score += 700; :" + score);
+                                grammaticalRelation2++;
+                                treeCollectionGramatical.add(sentenceConstituencyParse1);
+                            }
+                            GrammaticalRelation reln = TDY.reln();
+                            //sentenceConstituencyParse1
+                            if (reln.isApplicable(sentenceConstituencyParse1) && !treeCollectionReln.contains(sentenceConstituencyParse1)) {
+                                score += param4;
+                                //logger.info("score post score += 527; :" + score);
+                                relationApplicable2++;
+                                treeCollectionReln.add(sentenceConstituencyParse1);
+                            }
+                        }
+                    } catch (NoSuchElementException r) {
+                        // same as above: unreadable trees are skipped
+                    }
+                }
+            }
+        }
+        if ((grammaticalRelation1 == param5 && grammaticalRelation2 > param6) || (grammaticalRelation2 == param5 && grammaticalRelation1 > param6)) {
+            score -= param7;
+            //logger.info("score post score -= 3450; :" + score);
+        }
+        if (!allTypedDependencies1.isEmpty() || !allTypedDependencies2.isEmpty()) {
+            int allTypeDep1 = allTypedDependencies1.size();
+            int allTypeDep2 = allTypedDependencies2.size();
+            if (allTypeDep1 <= allTypeDep2 * param8 && allTypeDep2 <= allTypeDep1 * param8) {
+                if (allTypeDep1 > param9 && allTypeDep2 > param9 && (allTypeDep1 >= param10 || allTypeDep2 >= param10)) {
+                    if ((allTypeDep1 + param11 == allTypeDep2 || allTypeDep2 + param11 == allTypeDep1)) {
+                        score -= allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * param12 : (allTypeDep2 - allTypeDep1) * param12;
+                        //logger.info("score post score -= allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * 160 : (allTypeDep2 - allTypeDep1) * 160; :" + score);
+                    } else if (allTypeDep1 * param13 >= allTypeDep2 && allTypeDep2 * param13 >= allTypeDep1) {
+                        score += allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * param14 : (allTypeDep2 - allTypeDep1) * param14;
+                        //logger.info("score post score += allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * 260 : (allTypeDep2 - allTypeDep1) * 260; :" + score);
+                    } else if (allTypeDep1 >= param15 && allTypeDep1 <= param16 && allTypeDep2 >= param15 && allTypeDep2 <= param16) {
+                        score += allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * param17 : (allTypeDep2 - allTypeDep1) * param17;
+                        //logger.info("score post score += allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * 260 : (allTypeDep2 - allTypeDep1) * 260; :" + score);
+                    } else {
+                        score -= allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * param18 : (allTypeDep2 - allTypeDep1) * param18;
+                        //logger.info("score post score -= allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * 600 : (allTypeDep2 - allTypeDep1) * 600; :" + score);
+                    }
+                    alltypeDepsSize1.add(allTypeDep1);
+                    alltypeDepsSize1.add(allTypeDep2);
+                }
+            }
+            if (allTypeDep1 >= param19 && allTypeDep2 >= param19) {
+                int largerTypeDep = allTypeDep1 > allTypeDep2 ? allTypeDep1 : allTypeDep2;
+                int smallerTypeDep = allTypeDep1 < allTypeDep2 ?
allTypeDep1 : allTypeDep2; + int summation = (largerTypeDep * largerTypeDep) - (smallerTypeDep * smallerTypeDep); + //logger.info("largerTypeDep: " + largerTypeDep); + //logger.info("smallerTypeDep: " + smallerTypeDep); + //logger.info("summation: " + summation); + if (largerTypeDep > param20 && largerTypeDep < param21 && summation < param22) { + score += param23; + //logger.info("score post score += 23435; " + score); + } + if (summation >= param24 && summation <= param25) { + score -= param26; + //logger.info("score post score -= 10522" + score); + } + if (summation >= param27 && summation < param28) { + score -= param29; + //logger.info("score post score -= 4021;" + score); + } + if (largerTypeDep == param30 && smallerTypeDep == param30) { + score += param31; + //logger.info("score post score += 9340;: " + score); + } + if (summation / largerTypeDep < param32 && summation / largerTypeDep > param33 && smallerTypeDep * param34 > largerTypeDep + && !summationList.contains(summation)) { + score += summation * param35; + //logger.info("score post score += summation * 80; :" + score); + summationList.add(summation); + } else if (largerTypeDep == smallerTypeDep) { + score += param36; + //logger.info("score post score += 2502; :" + score); + } + } + if (relationApplicable1 > param37 && relationApplicable2 > param37 && relationApplicable1 != relationApplicable2) { + score -= param38; + //logger.info("score post score -= 4101; :" + score); + } else if (allTypeDep1 * param39 < allTypeDep2 || allTypeDep2 * param39 < allTypeDep1) { + score -= allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * (allTypeDep2 * param40) + : (allTypeDep2 - allTypeDep1) * (allTypeDep1 * param41); + //logger.info("score post score -= allTypeDep1 > allTypeDep2 ? (allTypeDep1 - allTypeDep2) * (allTypeDep2 * 450)\n" + + // " : (allTypeDep2 - allTypeDep1) * (allTypeDep1 * 450); :" + score); + } + if (relationApplicable1 > param42 && relationApplicable2 > param42 && relationApplicable1 * param43 > relationApplicable2 + && relationApplicable2 * param43 > relationApplicable1) { + score += relationApplicable1 > relationApplicable2 ? (relationApplicable1 - relationApplicable2) * param44 + : (relationApplicable2 - relationApplicable1) * param44; + //logger.info("score post score += relationApplicable1 > relationApplicable2 ? (relationApplicable1 - relationApplicable2) * 1500\n" + + // " : (relationApplicable2 - relationApplicable1) * 1500; :" + score); + } else if (relationApplicable1 * param45 < relationApplicable2 || relationApplicable2 * param45 < relationApplicable1) { + score -= relationApplicable1 > relationApplicable2 ? (relationApplicable1 - relationApplicable2) * param46 + : (relationApplicable2 - relationApplicable1) * param46; + //logger.info("score post score -= relationApplicable1 > relationApplicable2 ? 
(relationApplicable1 - relationApplicable2) * 500\n" + + // " : (relationApplicable2 - relationApplicable1) * 500; :" + score); + + //logger.info("relationApplicable1: " + relationApplicable1); + //logger.info("relationApplicable2: " + relationApplicable2); + + //logger.info("grammaticalRelation1: " + grammaticalRelation1); + //logger.info("grammaticalRelation2: " + grammaticalRelation2); + if (grammaticalRelation1 == param47) { + score -= param48; + //logger.info("score post score -= 3431;: " + score); + } + } + if (grammaticalRelation1 > param49 && grammaticalRelation2 > param49 && grammaticalRelation1 * param50 > grammaticalRelation2 + && grammaticalRelation2 * param50 > grammaticalRelation1) { + score += grammaticalRelation1 > grammaticalRelation2 ? (grammaticalRelation1 - grammaticalRelation2) * param51 + : (grammaticalRelation2 - grammaticalRelation1) * param51; + //logger.info("score post score += grammaticalRelation1 > grammaticalRelation2 ? (grammaticalRelation1 - grammaticalRelation2) * 4500\n" + + //" : (grammaticalRelation2 - grammaticalRelation1) * 4500; :" + score); + } else if (grammaticalRelation1 * param52 < grammaticalRelation2 || grammaticalRelation2 * param52 < grammaticalRelation1) { + score -= grammaticalRelation1 > grammaticalRelation2 ? (grammaticalRelation1 - grammaticalRelation2) * param53 + : (grammaticalRelation2 - grammaticalRelation1) * param54; + //logger.info("score post score -= grammaticalRelation1 > grammaticalRelation2 ? (grammaticalRelation1 - grammaticalRelation2) * 500\n" + + //" : (grammaticalRelation2 - grammaticalRelation1) * 500; :" + score); + } + } + ArrayList filerTreeContent = new ArrayList(); + int runCount1 = 0; + for (Tree sentenceConstituencyParse1 : sentenceConstituencyParseList1) { + try { + if (sentenceConstituencyParse1 != null && !sentenceConstituencyParse1.isEmpty()) { + for (Tree sentenceConstituencyParse2 : sentenceConstituencyParseList2) { + try { + if (sentenceConstituencyParse2 != null && !sentenceConstituencyParse2.isEmpty()) { + for (CoreLabel LBW : sentenceConstituencyParse1.taggedLabeledYield()) { + for (CoreLabel LBW1 : sentenceConstituencyParse2.taggedLabeledYield()) { + if (LBW.lemma() != null && LBW1.lemma() != null && LBW.lemma().equals(LBW1.lemma())) { + boolean found = false; + for (String str : filerTreeContent) { + if (str.equals(LBW.lemma())) { + found = true; + break; + } + } + if (!found && LBW.lemma() != null) { + filerTreeContent.add(LBW.lemma()); + runCount1++; + } + } + } + } + } + } catch (NoSuchElementException e) { + + } + } + } + } catch (NoSuchElementException e) { + + } + } + score += runCount1 * param55; + //logger.info("score post score += runCount1 * 250; :" + score); + int typeSizeSmallest = 100; + int typeSizeLargest = 0; + for (Integer i : alltypeDepsSize1) { + if (i > typeSizeLargest) { + typeSizeLargest = i; + } + if (i < typeSizeSmallest) { + typeSizeSmallest = i; + } + } + if (typeSizeLargest >= typeSizeSmallest * param56) { + score -= typeSizeLargest * param57; + //logger.info("score post score -= typeSizeLargest * 160; :" + score); + } + typeSizeLargest = 0; + typeSizeSmallest = 100; + for (int i : summationList) { + if (i > typeSizeLargest) { + typeSizeLargest = i; + } + if (i < typeSizeSmallest) { + typeSizeSmallest = i; + } + } + if (typeSizeLargest >= typeSizeSmallest * param58) { + score -= typeSizeLargest * param59; + //logger.info("score post score -= typeSizeLargest * 160; :" + score); + } + return score; + } + + private Double simpleRNNMatrixCalculations(Double score, ArrayList 
simpleSMXlist1, + ArrayList simpleSMXlist2) { + long param1 = (long) data.get("simpleRNNMatrixCalculations_param1"); + long param2 = (long) data.get("simpleRNNMatrixCalculations_param2"); + long param3 = (long) data.get("simpleRNNMatrixCalculations_param3"); + long param4 = (long) data.get("simpleRNNMatrixCalculations_param4"); + long param5 = (long) data.get("simpleRNNMatrixCalculations_param5"); + long param6 = (long) data.get("simpleRNNMatrixCalculations_param6"); + long param7 = (long) data.get("simpleRNNMatrixCalculations_param7"); + long param8 = (long) data.get("simpleRNNMatrixCalculations_param8"); + long param9 = (long) data.get("simpleRNNMatrixCalculations_param9"); + long param10 = (long) data.get("simpleRNNMatrixCalculations_param10"); + long param11 = (long) data.get("simpleRNNMatrixCalculations_param11"); + long param12 = (long) data.get("simpleRNNMatrixCalculations_param12"); + long param13 = (long) data.get("simpleRNNMatrixCalculations_param13"); + long param14 = (long) data.get("simpleRNNMatrixCalculations_param14"); + long param15 = (long) data.get("simpleRNNMatrixCalculations_param15"); + long param16 = (long) data.get("simpleRNNMatrixCalculations_param16"); + long param17 = (long) data.get("simpleRNNMatrixCalculations_param17"); + Double param18 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param18"); + Double param19 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param19"); + Double param20 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param20"); + Double param21 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param21"); + Double param22 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param22"); + Double param23 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param23"); + long param24 = (long) data.get("simpleRNNMatrixCalculations_param24"); + long param25 = (long) data.get("simpleRNNMatrixCalculations_param25"); + long param26 = (long) data.get("simpleRNNMatrixCalculations_param26"); + long param27 = (long) data.get("simpleRNNMatrixCalculations_param27"); + Double param28 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param28"); + long param29 = (long) data.get("simpleRNNMatrixCalculations_param29"); + Double param30 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param30"); + Double param31 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param31"); + Double param32 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param32"); + Double param33 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param33"); + Double param34 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param34"); + Double param35 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param35"); + Double param36 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param36"); + long param37 = (long) data.get("simpleRNNMatrixCalculations_param37"); + long param38 = (long) data.get("simpleRNNMatrixCalculations_param38"); + long param39 = (long) data.get("simpleRNNMatrixCalculations_param39"); + long param40 = (long) data.get("simpleRNNMatrixCalculations_param40"); + long param41 = (long) data.get("simpleRNNMatrixCalculations_param41"); + long param42 = (long) data.get("simpleRNNMatrixCalculations_param42"); + long param43 = (long) data.get("simpleRNNMatrixCalculations_param43"); + long param44 = (long) data.get("simpleRNNMatrixCalculations_param44"); + long param45 = (long) data.get("simpleRNNMatrixCalculations_param45"); + long param46 = (long) data.get("simpleRNNMatrixCalculations_param46"); + Double param47 = (Double) 
data.get("simpleRNNMatrixCalculationsDouble_param47"); + long param48 = (long) data.get("simpleRNNMatrixCalculations_param48"); + Double param49 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param49"); + long param50 = (long) data.get("simpleRNNMatrixCalculations_param50"); + long param51 = (long) data.get("simpleRNNMatrixCalculations_param51"); + Double param52 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param52"); + long param53 = (long) data.get("simpleRNNMatrixCalculations_param53"); + long param54 = (long) data.get("simpleRNNMatrixCalculations_param54"); + Double param55 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param55"); + Double param56 = (Double) data.get("simpleRNNMatrixCalculationsDouble_param56"); + long param57 = (long) data.get("simpleRNNMatrixCalculations_param57"); + long param58 = (long) data.get("simpleRNNMatrixCalculations_param58"); + long param59 = (long) data.get("simpleRNNMatrixCalculations_param59"); + long param60 = (long) data.get("simpleRNNMatrixCalculations_param60"); + long param61 = (long) data.get("simpleRNNMatrixCalculations_param61"); + long param62 = (long) data.get("simpleRNNMatrixCalculations_param62"); + long param63 = (long) data.get("simpleRNNMatrixCalculations_param63"); + long param64 = (long) data.get("simpleRNNMatrixCalculations_param64"); + long param65 = (long) data.get("simpleRNNMatrixCalculations_param65"); + long param66 = (long) data.get("simpleRNNMatrixCalculations_param66"); + long param67 = (long) data.get("simpleRNNMatrixCalculations_param67"); + long param68 = (long) data.get("simpleRNNMatrixCalculations_param68"); + long param69 = (long) data.get("simpleRNNMatrixCalculations_param69"); + long param70 = (long) data.get("simpleRNNMatrixCalculations_param70"); + long param71 = (long) data.get("simpleRNNMatrixCalculations_param71"); + long param72 = (long) data.get("simpleRNNMatrixCalculations_param72"); + long param73 = (long) data.get("simpleRNNMatrixCalculations_param73"); + long param74 = (long) data.get("simpleRNNMatrixCalculations_param74"); + + List iteratedDoubleList = new ArrayList(); + List iterateddotPredictions = new ArrayList(); + double dotpredictionTransfer = 0.0; + int iterationOverHeat = 0; + double scoreFallback = score; + for (SimpleMatrix simpleSMX2 : simpleSMXlist2) { + ArrayList AccumulateDotList = new ArrayList<>(); + ArrayList subtractorList = new ArrayList(); + ArrayList dotPredictions = new ArrayList(); + ArrayList DotOverTransfer = new ArrayList(); + Double totalSubtraction = 0.0; + Double largest = 10.0; + Double shortest = 100.0; + for (SimpleMatrix simpleSMX1 : simpleSMXlist1) { + double dotPrediction2 = simpleSMX2.dot(simpleSMX1) * param1; + double dotPrediction1 = simpleSMX1.dot(simpleSMX2) * param1; + AccumulateDotList.add(dotPrediction1); + AccumulateDotList.add(dotPrediction2); + double subtracter1 = dotPrediction1 > param2 ? dotPrediction1 - param3 : dotPrediction1 > param4 ? param3 - dotPrediction1 : param4; + double subtracter2 = dotPrediction2 > param2 ? dotPrediction2 - param3 : dotPrediction2 > param4 ? 
param3 - dotPrediction2 : param4; + subtractorList.add(subtracter1); + subtractorList.add(subtracter2); + dotpredictionTransfer = dotPrediction1; + if (!dotPredictions.contains(dotPrediction1)) { + for (Double transferDots : DotOverTransfer) { + if (transferDots == dotPrediction1) { + totalSubtraction += transferDots; + } else { + score -= subtracter1 * param5; + //logger.info("score score -= subtracter1 * 25; : " + score); + } + } + DotOverTransfer.add(dotPrediction1); + } else { + subtracter1 -= param6; + subtracter1 *= param7; + score += subtracter1 * dotPrediction1; + //logger.info("score score += subtracter1 * dotPrediction1; : " + score); + } + dotPredictions.add(dotPrediction1); + if (!dotPredictions.contains(dotPrediction2)) { + for (Double transferDots : DotOverTransfer) { + if (transferDots == dotPrediction2) { + totalSubtraction += transferDots; + } else { + score -= subtracter1 * param8; + //logger.info("score score -= subtracter1 * 25; : " + score); + } + } + DotOverTransfer.add(dotPrediction2); + if (dotPrediction2 > largest) { + largest = dotPrediction2; + } + if (dotPrediction2 < shortest) { + shortest = dotPrediction2; + } + Double dotPredictionIntervalDifference = largest - shortest; + subtracter2 *= param9; + if (dotPredictionIntervalDifference < param10) { + if (dotPredictions.size() > param11) { + if (subtracter2 > param12) { + score -= subtracter2; + //logger.info("score score -= subtracter2; : " + score); + } else { + score += subtracter2; + //logger.info("score score += subtracter2; : " + score); + } + } + } else { + score -= subtracter2 / param13; + //logger.info("score score -= subtracter2 / 10; : " + score); + } + } else { + if (subtracter2 > param14 && subtracter2 < param15) { + if (dotPrediction2 > param16 && dotPrediction2 < param17) { + if (dotpredictionTransfer != param18 && (subtracter2 / dotPrediction2 < param19 || (subtracter2 / dotPrediction2 > param20 + && subtracter2 / dotPrediction2 < param21))) { + //logger.info("subtracter2: " + subtracter2); + //logger.info("dotpredictionTransfer: " + dotpredictionTransfer); + //logger.info("dotPrediction2: " + dotPrediction2); + //logger.info("dotPrediction1: " + dotPrediction1); + if (subtracter2 / dotPrediction2 < param22 && subtracter2 / dotPrediction2 > param23) { + score -= param24; + //logger.info("score score -= 2502; : " + score); + } else if (dotPrediction2 < param25 && dotPrediction2 > param26) { + score += param27; + //logger.info("score post score += 7948; " + score); + } else if (subtracter2 > param28) { + score -= param29; + //logger.info("score score -= 7530; : " + score); + } + } else if (dotpredictionTransfer != param30 && subtracter2 / dotPrediction2 > param31 && subtracter2 / dotPrediction2 < param32) { + if (subtracter2 > param33 && subtracter2 < param34 && dotPrediction2 > param35 && dotPrediction2 < param36) { + score += param37; + //logger.info("score score += 4500; : " + score); + } else { + score -= param38; + //logger.info("score score -= 4500; : " + score); + } + } else if (!iterateddotPredictions.contains(dotPrediction2)) { + score += subtracter2 * dotPrediction2; // += + //logger.info("score score += subtracter2 * dotPrediction2; : " + score); + iterateddotPredictions.add(dotPrediction2); + } else { + score -= param39; + //logger.info("score score -= 550; : " + score); + } + } else if (dotPrediction2 < param40 && subtracter2 < param41) { + score -= dotPrediction2 * param42; + //logger.info("score score -= dotPrediction2 * 250; : " + score); + } else if (subtracter2 > param43) { + if 
(dotPrediction2 > param44 && dotPrediction2 < param45) { + score += param46; + //logger.info("score score += 3500; : " + score); + } else { + if (subtracter2 < param47 && dotPrediction2 > param48 && dotPrediction2 < param49) { + score -= (subtracter2 * dotPrediction2) * param50; + //logger.info("score score -= (subtracter2 * dotPrediction2) * 85; : " + score); + } else { + score -= param51; + //logger.info("score score -= 4500; : " + score); + } + } + } + } + } + dotPredictions.add(dotPrediction2); + iterationOverHeat++; + } + Double subTracPre = 0.0; + for (Double subtractors : subtractorList) { + if (Objects.equals(subTracPre, subtractors)) { + if (subTracPre > param52 && subTracPre < param53) { + score += (subTracPre * param54) / subtractorList.size(); + //logger.info("score score += (subTracPre * 55) / subtractorList.size(); : " + score); + } else if (subTracPre > param55 && subTracPre < param56) { + score += (subTracPre * param57) / subtractorList.size(); + //logger.info("score score += (subTracPre * 55) / subtractorList.size(); : " + score); + } + } else if (subTracPre > param58 && subTracPre < param59) { + score += (subTracPre * param60) / subtractorList.size(); + //logger.info("score score += (subTracPre * 50) / subtractorList.size(); : " + score); + } else if (subTracPre >= param61) { + score -= param62; + //logger.info("score score -= 2800; : " + score); + } else if (subTracPre < -param63 && subTracPre > -param64) { + score += subTracPre * param65; + //logger.info("score score += subTracPre * 100; : " + score); + } + subTracPre = subtractors; + } + //logger.info("totalSubtraction: " + totalSubtraction); + if (totalSubtraction > param66) { + score -= totalSubtraction * param67; + //logger.info("score score -= totalSubtraction * 25; : " + score); + } else { + score += totalSubtraction * param68; + //logger.info("score score += totalSubtraction * 25; : " + score); + } + Double preAccumulatorDot = 0.0; + Double postAccumulatorDot = 0.0; + for (Double accumulators : AccumulateDotList) { + if (Objects.equals(preAccumulatorDot, accumulators)) { + if (Objects.equals(postAccumulatorDot, accumulators)) { + score -= param69; + //logger.info("score score -= 1400; : " + score); + } + postAccumulatorDot = accumulators; + } + preAccumulatorDot = accumulators; + } + subTracPre = 0.0; + for (Double subtractors : subtractorList) { + if (Objects.equals(subTracPre, subtractors) && subTracPre != param70) { + if (!iteratedDoubleList.contains(subTracPre)) { + score += param71; + //logger.info("score score += 500; : " + score); + iteratedDoubleList.add(subTracPre); + } else { + score -= param72; + //logger.info("score score -= 150; : " + score); + } + } + subTracPre = subtractors; + } + } + if (iterationOverHeat > param73) { + score = scoreFallback; + score -= param74; + //logger.info("score score -= 2501; : " + score); + } + return score; + } + + public Double simpleRNNMaxtrixVectors(Double score, ArrayList simpleSMXlistVector1, + ArrayList simpleSMXlistVector2) { + boolean foundValidValues = false; + for (SimpleMatrix simpleSMX2 : simpleSMXlistVector2) { + for (int i = 0; i < simpleSMX2.getNumElements(); i++){ + if (simpleSMX2.get(i) != -1.0 && simpleSMX2.get(i) != 1.0) { + foundValidValues = true; + break; + } + } + } + for (SimpleMatrix simpleSMX1 : simpleSMXlistVector1) { + for (int i = 0; i < simpleSMX1.getNumElements(); i++){ + if (simpleSMX1.get(i) != -1.0 && simpleSMX1.get(i) != 1.0) { + foundValidValues = true; + break; + } + } + } + //nothing to do if no meaningful values found. 
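+        // "valid" here means at least one vector component that is not exactly -1.0 or 1.0;
+        // all-saturated node vectors carry no usable signal for the dot-product scoring below.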
+        if (!foundValidValues) {
+            return 0.0;
+        }
+
+        // large table of tuned thresholds and weights, read from the dynamic score data map
+        int param1 = (int) data.get("simpleRNNMaxtrixVectors_param1");
+        int param2 = (int) data.get("simpleRNNMaxtrixVectors_param2");
+        int param3 = (int) data.get("simpleRNNMaxtrixVectors_param3");
+        int param4 = (int) data.get("simpleRNNMaxtrixVectors_param4");
+        int param5 = (int) data.get("simpleRNNMaxtrixVectors_param5");
+        Double param6 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param6");
+        Double param7 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param7");
+        int param8 = (int) data.get("simpleRNNMaxtrixVectors_param8");
+        Double param9 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param9");
+        Double param10 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param10");
+        int param11 = (int) data.get("simpleRNNMaxtrixVectors_param11");
+        Double param12 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param12");
+        Double param13 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param13");
+        int param14 = (int) data.get("simpleRNNMaxtrixVectors_param14");
+        int param15 = (int) data.get("simpleRNNMaxtrixVectors_param15");
+        int param16 = (int) data.get("simpleRNNMaxtrixVectors_param16");
+        int param17 = (int) data.get("simpleRNNMaxtrixVectors_param17");
+        int param18 = (int) data.get("simpleRNNMaxtrixVectors_param18");
+        int param19 = (int) data.get("simpleRNNMaxtrixVectors_param19");
+        int param20 = (int) data.get("simpleRNNMaxtrixVectors_param20");
+
+        int param21 = (int) data.get("simpleRNNMaxtrixVectors_param21");
+        int param22 = (int) data.get("simpleRNNMaxtrixVectors_param22");
+        int param23 = (int) data.get("simpleRNNMaxtrixVectors_param23");
+        int param24 = (int) data.get("simpleRNNMaxtrixVectors_param24");
+        int param25 = (int) data.get("simpleRNNMaxtrixVectors_param25");
+        int param26 = (int) data.get("simpleRNNMaxtrixVectors_param26");
+        int param27 = (int) data.get("simpleRNNMaxtrixVectors_param27");
+        int param28 = (int) data.get("simpleRNNMaxtrixVectors_param28");
+        int param29 = (int) data.get("simpleRNNMaxtrixVectors_param29");
+        int param30 = (int) data.get("simpleRNNMaxtrixVectors_param30");
+        int param31 = (int) data.get("simpleRNNMaxtrixVectors_param31");
+        int param32 = (int) data.get("simpleRNNMaxtrixVectors_param32");
+        int param33 = (int) data.get("simpleRNNMaxtrixVectors_param33");
+        Double param34 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param34");
+        Double param35 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param35");
+        int param36 = (int) data.get("simpleRNNMaxtrixVectors_param36");
+        Double param37 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param37");
+        Double param38 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param38");
+        int param39 = (int) data.get("simpleRNNMaxtrixVectors_param39");
+        int param40 = (int) data.get("simpleRNNMaxtrixVectors_param40");
+        int param41 = (int) data.get("simpleRNNMaxtrixVectors_param41");
+        int param42 = (int) data.get("simpleRNNMaxtrixVectors_param42");
+        int param43 = (int) data.get("simpleRNNMaxtrixVectors_param43");
+        int param44 = (int) data.get("simpleRNNMaxtrixVectors_param44");
+        int param45 = (int) data.get("simpleRNNMaxtrixVectors_param45");
+        int param46 = (int) data.get("simpleRNNMaxtrixVectors_param46");
+        int param47 = (int) data.get("simpleRNNMaxtrixVectors_param47");
+        int param48 = (int) data.get("simpleRNNMaxtrixVectors_param48");
+        int param49 = (int) data.get("simpleRNNMaxtrixVectors_param49");
+
+        Double param50 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param50");
+        int param51 = (int)
data.get("simpleRNNMaxtrixVectors_param51"); + Double param52 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param52"); + int param53 = (int) data.get("simpleRNNMaxtrixVectors_param53"); + Double param54 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param54"); + int param55 = (int) data.get("simpleRNNMaxtrixVectors_param55"); + Double param56 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param56"); + Double param57 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param57"); + int param58 = (int) data.get("simpleRNNMaxtrixVectors_param58"); + Double param59 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param59"); + Double param60 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param60"); + int param61 = (int) data.get("simpleRNNMaxtrixVectors_param61"); + Double param62 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param62"); + Double param63 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param63"); + int param64 = (int) data.get("simpleRNNMaxtrixVectors_param64"); + Double param65 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param65"); + Double param66 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param66"); + int param67 = (int) data.get("simpleRNNMaxtrixVectors_param67"); + int param68 = (int) data.get("simpleRNNMaxtrixVectors_param68"); + int param69 = (int) data.get("simpleRNNMaxtrixVectors_param69"); + Double param70 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param70"); + Double param71 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param71"); + int param72 = (int) data.get("simpleRNNMaxtrixVectors_param72"); + Double param73 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param73"); + Double param74 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param74"); + int param75 = (int) data.get("simpleRNNMaxtrixVectors_param75"); + int param76 = (int) data.get("simpleRNNMaxtrixVectors_param76"); + Double param77 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param77"); + Double param78 = (Double) data.get("simpleRNNMaxtrixVectorsDouble_param78"); + int param79 = (int) data.get("simpleRNNMaxtrixVectors_param79"); + int param80 = (int) data.get("simpleRNNMaxtrixVectors_param80"); + int param81 = (int) data.get("simpleRNNMaxtrixVectors_param81"); + int param82 = (int) data.get("simpleRNNMaxtrixVectors_param82"); + int param83 = (int) data.get("simpleRNNMaxtrixVectors_param83"); + int param84 = (int) data.get("simpleRNNMaxtrixVectors_param84"); + int param85 = (int) data.get("simpleRNNMaxtrixVectors_param85"); + int param86 = (int) data.get("simpleRNNMaxtrixVectors_param86"); + + + + ArrayList elementSumCounter = new ArrayList<>(); + ArrayList dotMap = new ArrayList<>(); + ArrayList elementSumMap = new ArrayList<>(); + ArrayList dotSumMap = new ArrayList<>(); + Double preDot = 0.0; + Double postDot = 0.0; + int iterateSize = simpleSMXlistVector1.size() + simpleSMXlistVector2.size(); + boolean punish_overhead = false; + if (simpleSMXlistVector1.size() > param1 || simpleSMXlistVector2.size() > param1) { + punish_overhead = true; + } + for (SimpleMatrix simpleSMX2 : simpleSMXlistVector2) { + for (SimpleMatrix simpleSMX1 : simpleSMXlistVector1) { + double dot2 = simpleSMX2.dot(simpleSMX1); + double elementSum2 = simpleSMX2.kron(simpleSMX1).elementSum(); + double dot1 = simpleSMX1.dot(simpleSMX2); + double elementSum1 = simpleSMX1.kron(simpleSMX2).elementSum(); + if (preDot == dot2) { + if (postDot == dot2) { + score -= param2; + //logger.info("score score -= 500: " + score); + } + postDot = dot2; + } + if (preDot == dot1) { + if 
(postDot == dot1) { + score -= param3; + //logger.info("score score -= 500 1: " + score); + } + postDot = dot1; + } + preDot = dot1; + elementSum1 = (double) Math.round(elementSum1 * param4) / param4; + elementSumCounter.add(elementSum1); + dotMap.add(dot1); + preDot = dot2; + elementSum2 = (double) Math.round(elementSum2 * param5) / param5; + elementSumCounter.add(elementSum2); + dotMap.add(dot2); + if (!dotSumMap.contains(dot1)) { + if (dot1 < param6 && dot1 > param7) { + score += param8; + //logger.info("score score += 256; " + score); + } + if (dot1 > param9 && dot1 < param10) { + score -= param11; + //logger.info("score score -= 2400; " + score); + } else if (dot1 > param12 && dot1 < param13) { + score += param14; + //logger.info("score score += 3600; " + score); + } + dotSumMap.add(dot1); + } else { + score -= param15; + //logger.info("score score -= 50; " + score); + } + if (!elementSumMap.contains(elementSum1)) { + //logger.info("elementSum1: " + elementSum1); + if (elementSum1 > param16 && elementSum1 < param17) { + score -= param18; + //logger.info("score post score -= 6891;: " + score); + } + if ((elementSum1 > param19 && elementSum1 < param20) || + (elementSum1 > param21 && elementSum1 < param22)) { + score += param23 * param24; + //logger.info("score post score += 254 * 150; " + score); + } + if (elementSum1 > param25 && elementSum1 < param26) { + score -= param27; + //logger.info("score post score -= 7678;"); + } + if (elementSum1 > param28 && elementSum1 < param29) { + score -= param30; + //logger.info("score post score -= 4521;: " + score); + } + if (elementSum1 > param31 && elementSum1 < param32) { + score += elementSum1 * param33; + //logger.info("score post score += elementSum1 * 43; : " + score); + } + if (elementSum1 < param34 && elementSum1 > param35) { + score += param36; + //logger.info("score score += 1300; " + score); + } else if (elementSum1 > param37 && elementSum1 < param38) { + score += param39; + //logger.info("score score += 7934; " + score); + } else if (elementSum1 > param40 && elementSum1 < param41) { + //logger.info("elementSum_1: " + elementSum1); + score -= elementSum1 * param42; + //logger.info("score score -= elementSum1 * 583; " + score); + } else if (dot2 == elementSum2) { + score += param43; + //logger.info("score score += 2501; " + score); + } + elementSumMap.add(elementSum1); + } else { + score -= param44; + //logger.info("score score -= 50; " + score); + } + //logger.info("dot2: " + dot2); + if (elementSum1 > param45 && elementSum1 < param46 && dot2 > param47 && dot2 < param48) { + score += param49; + //logger.info("score post score += 9876;: " + score); + } + if (!dotSumMap.contains(dot2)) { + if (dot2 < param50) { + score += dot2 * param51; + //logger.info("score score += dot2 * 500; " + score); + } else if (dot2 < param52) { + score += param53; + //logger.info("score score += 256; " + score); + } + if (dot2 > param54) { + score -= param55; + //logger.info("score score -= 1200; " + score); + } + dotSumMap.add(dot2); + } else if (dot2 > param56 && dot2 < param57) { + score -= param58; + //logger.info("score score -= 350; " + score); + } else if ((dot2 > param59 && dot2 < param60) || punish_overhead) { + score -= dot2 * param61; + //logger.info("score score -= dot2 * 1557; " + score); + } else if (dot2 > param62 && dot2 < param63) { + score += dot2 * param64; + //logger.info("score score += dot2 * 2934; " + score); + } else if (dot2 > param65 && dot2 < param66) { + score -= dot2 * param67; + //logger.info("score score -= dot2 * 584; " + score); 
+                    } else if (dot1 != elementSum1 && dot2 * param68 > elementSum1 && elementSum1 * param68 > dot2) {
+                        score -= param69;
+                        //logger.info("score score -= 6556; " + score);
+                    }
+                    if (!elementSumMap.contains(elementSum2)) {
+                        if (elementSum2 < param70 && elementSum2 > param71) {
+                            score += param72;
+                            //logger.info("score score += 3300; " + score);
+                        } else if (elementSum2 > param73 && elementSum2 < param74) {
+                            score += param75;
+                            //logger.info("score score += 6790; " + score);
+                        } else {
+                            score -= elementSum2 * param76;
+                            //logger.info("score score -= elementSum2 * 1024; " + score);
+                        }
+                        elementSumMap.add(elementSum2);
+                    } else if (elementSum2 > param77 && elementSum2 < param78) {
+                        score += param79;
+                        //logger.info("score score += 750; " + score);
+                    } else if (elementSum2 > param80 && elementSum2 < param81) {
+                        score -= param82 * elementSum2;
+                        //logger.info("elementSum2: " + elementSum2);
+                        //logger.info("score score -= 417 * elementSum2; " + score);
+                    } else if (dot2 != elementSum2 && dot2 * param83 < elementSum1 && elementSum1 * param83 < dot2) {
+                        score -= param84;
+                        //logger.info("score post score -= 7501;" + score);
+                    }
+                }
+            }
+            if (iterateSize > param85) {
+                score -= iterateSize * param86;
+                //logger.info("score score -= iterateSize * 400; " + score);
+            }
+            score = elementsAndDotsRelation(score, dotMap, elementSumCounter);
+            //logger.info("score score = elementsAndDotsRelation(score, dotMap, elementSumCounter); " + score);
+            return score;
+        }
+
+        private Double elementsAndDotsRelation(Double score, ArrayList<Double> dotMap,
+                ArrayList<Double> elementSumCounter) {
+            // guard: both lists can be empty when one vector list had no entries; the
+            // OptionalDouble getAsDouble() calls below would then throw NoSuchElementException
+            if (dotMap.isEmpty() || elementSumCounter.isEmpty()) {
+                return score;
+            }
+            OptionalDouble minvalueDots = dotMap.stream().mapToDouble(Double::doubleValue).min();
+            OptionalDouble maxvalueDots = dotMap.stream().mapToDouble(Double::doubleValue).max();
+            boolean permitted = false;
+            if (minvalueDots.getAsDouble() != maxvalueDots.getAsDouble()) {
+                permitted = true;
+            }
+            if (permitted) {
+                Double dotsVariance = maxvalueDots.getAsDouble() - minvalueDots.getAsDouble();
+                if (maxvalueDots.getAsDouble() > minvalueDots.getAsDouble() * 10 && (dotsVariance < 0.40
+                        || dotsVariance > 0.60)) {
+                    score -= 5535;
+                    //logger.info("score post score -= 5535; " + score);
+                } else if (minvalueDots.getAsDouble() < -0.10) {
+                    score -= 3551;
+                    //logger.info("score post score -= 3551; " + score);
+                } else if (dotsVariance > minvalueDots.getAsDouble() * 2) {
+                    score += 3500;
+                    //logger.info("score post score += 3500; " + score);
+                } else if (minvalueDots.getAsDouble() * 2 > maxvalueDots.getAsDouble()) {
+                    score -= 3500;
+                    //logger.info("score post score -= 3500; " + score);
+                }
+            }
+            OptionalDouble minvalueElements = elementSumCounter.stream().mapToDouble(Double::doubleValue).min();
+            OptionalDouble maxvalueElements = elementSumCounter.stream().mapToDouble(Double::doubleValue).max();
+            Double elementsVariance = maxvalueElements.getAsDouble() - minvalueElements.getAsDouble();
+            if (elementsVariance != 0.0) {
+                if (elementsVariance <= 0.01 && maxvalueElements.getAsDouble() <= 0.02) {
+                    if (maxvalueElements.getAsDouble() < 0.01 && minvalueElements.getAsDouble() > -0.02 && minvalueElements.getAsDouble() < 0.0) {
+                        score += 6500;
+                    } else {
+                        score -= 5500;
+                    }
+                } else if (minvalueElements.getAsDouble() < 0.0 && minvalueElements.getAsDouble() - maxvalueElements.getAsDouble() < 0.50) {
+                    score -= 4500;
+                } else if (elementsVariance * 2 >= maxvalueElements.getAsDouble() && elementsVariance < 0.1) {
+                    score -= elementsVariance * 86000;
+                }
+            } else if (maxvalueElements.getAsDouble() == minvalueElements.getAsDouble() &&
maxvalueElements.getAsDouble() > 0.40 + && maxvalueElements.getAsDouble() < 0.60) { + score += 600; + } else if (maxvalueElements.getAsDouble() < 0.20 && minvalueElements.getAsDouble() >= 0.15) { + score += 2600; + } else if (maxvalueElements.getAsDouble() == minvalueElements.getAsDouble() && minvalueElements.getAsDouble() == 0.12) { + score += 2600; + } + return score; + } + + private Double sentimentMatrixVariances(Double score, int longest1, int longest2, int mainSentiment1, + int mainSentiment2) { + //logger.info("longest1: " + longest1); + //logger.info("longest2: " + longest2); + //logger.info("mainSentiment1: " + mainSentiment1); + //logger.info("mainSentiment2: " + mainSentiment2); + long param1 = (long) data.get("sentimentMatrixVariances_param1"); + long param2 = (long) data.get("sentimentMatrixVariances_param2"); + long param3 = (long) data.get("sentimentMatrixVariances_param3"); + long param4 = (long) data.get("sentimentMatrixVariances_param4"); + long param5 = (long) data.get("sentimentMatrixVariances_param5"); + long param6 = (long) data.get("sentimentMatrixVariances_param6"); + long param7 = (long) data.get("sentimentMatrixVariances_param7"); + long param8 = (long) data.get("sentimentMatrixVariances_param8"); + long param9 = (long) data.get("sentimentMatrixVariances_param9"); + long param10 = (long) data.get("sentimentMatrixVariances_param10"); + long param11 = (long) data.get("sentimentMatrixVariances_param11"); + long param12 = (long) data.get("sentimentMatrixVariances_param12"); + long param13 = (long) data.get("sentimentMatrixVariances_param13"); + long param14 = (long) data.get("sentimentMatrixVariances_param14"); + long param15 = (long) data.get("sentimentMatrixVariances_param15"); + + long param16 = (long) data.get("sentimentMatrixVariances_param16"); + long param17 = (long) data.get("sentimentMatrixVariances_param17"); + long param18 = (long) data.get("sentimentMatrixVariances_param18"); + long param19 = (long) data.get("sentimentMatrixVariances_param19"); + long param20 = (long) data.get("sentimentMatrixVariances_param20"); + long param21 = (long) data.get("sentimentMatrixVariances_param21"); + long param22 = (long) data.get("sentimentMatrixVariances_param22"); + long param23 = (long) data.get("sentimentMatrixVariances_param23"); + long param24 = (long) data.get("sentimentMatrixVariances_param24"); + long param25 = (long) data.get("sentimentMatrixVariances_param25"); + long param26 = (long) data.get("sentimentMatrixVariances_param26"); + long param27 = (long) data.get("sentimentMatrixVariances_param27"); + long param28 = (long) data.get("sentimentMatrixVariances_param28"); + long param29 = (long) data.get("sentimentMatrixVariances_param29"); + long param30 = (long) data.get("sentimentMatrixVariances_param30"); + long param31 = (long) data.get("sentimentMatrixVariances_param31"); + + long param32 = (long) data.get("sentimentMatrixVariances_param32"); + long param33 = (long) data.get("sentimentMatrixVariances_param33"); + long param34 = (long) data.get("sentimentMatrixVariances_param34"); + long param35 = (long) data.get("sentimentMatrixVariances_param35"); + long param36 = (long) data.get("sentimentMatrixVariances_param36"); + long param37 = (long) data.get("sentimentMatrixVariances_param37"); + + if (longest2 > param1 && longest2 < param2) { + score -= param3; + //logger.info("score post score -= 9988;: " + score); + } + if (longest1 > param4 && longest1 < param5) { + score += param6; + //logger.info("score post score += 7903; " + score); + } + if (longest1 != 
longest2) { + long deffLongest = longest1 > longest2 ? longest1 : longest2; + long deffshorter = longest1 < longest2 ? longest1 : longest2; + if (deffLongest > deffshorter * param7) { + score -= param8; + } else if (deffLongest < (deffshorter * param9) - param10 && deffLongest - deffshorter <= param11) { + score += (deffLongest - deffshorter) * param12; + } else if (mainSentiment1 != mainSentiment2 && deffLongest - deffshorter > param13 && deffLongest - deffshorter < param14) { + score += (deffLongest - deffshorter) * param15; + } else if (deffLongest - deffshorter < param16) { + score += (deffLongest - deffshorter) * param17; + } else if (deffshorter * param18 >= deffLongest && deffshorter * param18 < deffLongest + param19) { + score += (deffLongest - deffshorter) * param20; + } else { + score -= (deffLongest - deffshorter) * param21; + } + if (deffLongest - deffshorter <= param22) { + if (deffLongest < param23 && deffshorter > param24) { + score += param25; + } else if (deffLongest - deffshorter == param26 && deffshorter >= param27 && deffLongest <= param28) { + score += param29; + } else { + if (deffshorter > param30 && deffLongest < param31) { + score += param32; + } else if (deffLongest - deffshorter == param33 && deffshorter > param34 && deffLongest < param35) { + score += param36; + } else { + score -= param37; + } + } + } + } + return score; + } + + private int classifyRawEvaluation() { + final List classifyRaw1 = this.listF; + final List classifyRaw2 = this.list1; + long param1 = (long) data.get("classifyRawEvaluation_param1"); + long param2 = (long) data.get("classifyRawEvaluation_param2"); + long param3 = (long) data.get("classifyRawEvaluation_param3"); + //logger.info("classifyRaw1 size: " + classifyRaw1.size()); + //logger.info("classifyRaw2 size: " + classifyRaw2.size()); + long increaseCount = param1; + if (classifyRaw1.size() == param2) { + increaseCount += param3; + } + return (int) ((classifyRaw1.size() > classifyRaw2.size() ? 
classifyRaw1.size() - + classifyRaw2.size() : classifyRaw2.size() - classifyRaw1.size()) * increaseCount); + } + + private Double entryCountsRelation(Double + score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { + int entry1 = cacheSentimentLocal1.size(); + int entry2 = cacheSentimentLocal2.size(); + long param1 = (long) data.get("entryCountsRelation_param1"); + long param2 = (long) data.get("entryCountsRelation_param2"); + long param3 = (long) data.get("entryCountsRelation_param3"); + long param4 = (long) data.get("entryCountsRelation_param4"); + long param5 = (long) data.get("entryCountsRelation_param5"); + long param6 = (long) data.get("entryCountsRelation_param6"); + long param7 = (long) data.get("entryCountsRelation_param7"); + long param8 = (long) data.get("entryCountsRelation_param8"); + long param9 = (long) data.get("entryCountsRelation_param9"); + long param10 = (long) data.get("entryCountsRelation_param10"); + long param11 = (long) data.get("entryCountsRelation_param11"); + long param12 = (long) data.get("entryCountsRelation_param12"); + long param13 = (long) data.get("entryCountsRelation_param13"); + long param14 = (long) data.get("entryCountsRelation_param14"); + long param15 = (long) data.get("entryCountsRelation_param15"); + //logger.info("entry1: " + entry1); + //logger.info("entry2: " + entry2); + if (entry1 == param1 && entry2 > param2 && entry1 >= entry2) { + score += param3 * entry1; + //logger.info("score post score += 5894 * entry1;: " + score); + } + if (entry1 > param4 && entry2 > param4) { + if ((entry1 >= entry2 * param5) || (entry2 >= entry1 * param5)) { + score -= entry1 > entry2 ? (entry1 - entry2) * param6 : (entry2 - entry1) * param6; + } else if ((entry1 >= entry2 * param7 || entry2 >= entry1 * param7)) { + score -= entry1 > entry2 ? entry1 * param8 : entry2 * param8; + } else if ((entry1 >= entry2 * param9 || entry2 >= entry1 * param9) && entry1 * param9 >= entry2 && entry2 * param9 >= entry1) { + score -= entry1 > entry2 ? (entry1 - entry2) * param10 : (entry2 - entry1) * param10; + } else if (entry1 * param11 >= entry2 && entry2 * param11 >= entry1) { + score -= entry1 > entry2 ? (entry1 - entry2) * param12 : (entry2 - entry1) * param12; + } else if (entry1 > param13 && entry2 > param13 && entry1 * param14 > entry2 && entry2 * param14 > entry1) { + score += entry1 > entry2 ? 
entry2 * param15 : entry1 * param15;
+            }
+        }
+        return score;
+    }
+
+    private ArrayList<TypedDependency> grammaticalStructureAllTypedDependencies(
+            ArrayList<GrammaticalStructure> grammaticalStructures) {
+        ArrayList<TypedDependency> typedDependenciesArr = new ArrayList<>();
+        for (GrammaticalStructure gs : grammaticalStructures) {
+            Collection<TypedDependency> typedDependencies = gs.allTypedDependencies();
+            typedDependenciesArr.addAll(typedDependencies);
+        }
+        return typedDependenciesArr;
+    }
+
+    private ArrayList<GrammaticalStructure> grammaticalStructureSetup(ArrayList<Tree> trees) {
+        ArrayList<GrammaticalStructure> grammaticalStructures = new ArrayList<>();
+        for (Tree tree : trees) {
+            try {
+                if (!tree.isEmpty()) {
+                    GrammaticalStructure gs = gsf.newGrammaticalStructure(tree);
+                    grammaticalStructures.add(gs);
+                }
+            } catch (NoSuchElementException e) {
+                // trees the grammatical-structure factory cannot handle are skipped
+            }
+        }
+        return grammaticalStructures;
+    }
+
+    private ArrayList<Tree> retrieveTrees(List<CoreMap> sentences) {
+        ArrayList<Tree> treeList = new ArrayList<>();
+        for (CoreMap sentence : sentences) {
+            Tree sentenceConstituencyParse = sentence.get(TreeCoreAnnotations.TreeAnnotation.class);
+            treeList.add(sentenceConstituencyParse);
+        }
+        return treeList;
+    }
+
+    private ArrayList<SimpleMatrix> sentimentRNNCorePredicted(List<CoreMap> sentences,
+            Class<SentimentCoreAnnotations.SentimentAnnotatedTree> sentimentAnnotatedTreeClass) {
+        ArrayList<SimpleMatrix> rnnCoreAnnotationsPrediction = new ArrayList<>();
+        for (CoreMap sentence : sentences) {
+            Tree tree = sentence.get(sentimentAnnotatedTreeClass);
+            if (tree != null) {
+                SimpleMatrix predictions = RNNCoreAnnotations.getPredictions(tree);
+                rnnCoreAnnotationsPrediction.add(predictions);
+            }
+        }
+        return rnnCoreAnnotationsPrediction;
+    }
+
+    private ArrayList<SimpleMatrix> sentimentRNNCoreNodevectors(List<CoreMap> sentences,
+            Class<SentimentCoreAnnotations.SentimentAnnotatedTree> sentimentAnnotatedTreeClass) {
+        ArrayList<SimpleMatrix> rnnCoreAnnotationsNodevectors = new ArrayList<>();
+        for (CoreMap sentence : sentences) {
+            Tree tree = sentence.get(sentimentAnnotatedTreeClass);
+            if (tree != null) {
+                SimpleMatrix nodeVector = RNNCoreAnnotations.getNodeVector(tree);
+                rnnCoreAnnotationsNodevectors.add(nodeVector);
+            }
+        }
+        return rnnCoreAnnotationsNodevectors;
+    }
+
+    private ArrayList<Integer> sentimentRNNCoreAnnotations(List<CoreMap> sentences,
+            Class<SentimentCoreAnnotations.SentimentAnnotatedTree> sentimentAnnotatedTreeClass) {
+        ArrayList<Integer> rnnCoreAnnotationsPredicted = new ArrayList<>();
+        for (CoreMap sentence : sentences) {
+            Tree tree = sentence.get(sentimentAnnotatedTreeClass);
+            if (tree != null) {
+                int predictedClass = RNNCoreAnnotations.getPredictedClass(tree);
+                rnnCoreAnnotationsPredicted.add(predictedClass);
+            }
+        }
+        return rnnCoreAnnotationsPredicted;
+    }
+
+    private int setupMainSentiment(List<CoreMap> sentences4,
+            Class<SentimentCoreAnnotations.SentimentAnnotatedTree> sentimentAnnotatedTreeClass) {
+        int longest = 0;
+        int longestSentiment = 0;
+        for (CoreMap sentence : sentences4) {
+            Tree tree = sentence.get(sentimentAnnotatedTreeClass);
+            // guard against sentences without an annotated tree, as the sibling methods above do
+            if (tree != null) {
+                int sentiment = RNNCoreAnnotations.getPredictedClass(tree);
+                String partText = sentence.toString();
+                if (partText.length() > longest) {
+                    longestSentiment = sentiment;
+                    longest = partText.length();
+                }
+            }
+        }
+        return longestSentiment;
+    }
+
+    private int setupMainLongest(List<CoreMap> sentences) {
+        int longest = 0;
+        for (CoreMap sentence : sentences) {
+            String partText = sentence.toString();
+            if (partText.length() > longest) {
+                longest = partText.length();
+            }
+        }
+        return longest;
+    }
+
+    private Double entryCountsScoring(Double score, ArrayList<Integer> cacheSentimentLocal1,
+            ArrayList<Integer> cacheSentimentLocal2) {
+        ArrayList<Integer> countsMap = new ArrayList<>();
+        long param1 = (long) data.get("entryCountsScoring_param1");
+        long param2 = (long) data.get("entryCountsScoring_param2");
+        long param3 = (long) data.get("entryCountsScoring_param3");
+        long param4 = (long)
data.get("entryCountsScoring_param4"); + long param5 = (long) data.get("entryCountsScoring_param5"); + long param6 = (long) data.get("entryCountsScoring_param6"); + long param7 = (long) data.get("entryCountsScoring_param7"); + long param8 = (long) data.get("entryCountsScoring_param8"); + int totalsize = cacheSentimentLocal1.size() + cacheSentimentLocal2.size(); + //logger.info("cacheSentimentLocal1.size(): " + cacheSentimentLocal1.size()); + //logger.info("cacheSentimentLocal2.size(): " + cacheSentimentLocal2.size()); + if ((cacheSentimentLocal1.size() == param1 && cacheSentimentLocal2.size() < param2) + || (cacheSentimentLocal2.size() == param1 && cacheSentimentLocal1.size() < param2) + && cacheSentimentLocal1.size() > param3 && cacheSentimentLocal2.size() > param3) { + score += param4; + //logger.info("score post score += 45843;" + score); + } + for (int counts : cacheSentimentLocal1) { + for (int counts1 : cacheSentimentLocal2) { + if (counts > param5 && counts1 > param5) { + if (counts == counts1 && !countsMap.contains(counts)) { + score += (double) (counts * param6) / totalsize; + //logger.info("score post score += (counts * 250) / totalsize; " + score); + countsMap.add(counts); + } else if (counts * param7 < counts1 || counts1 * param7 < counts) { + score -= param8; + //logger.info("score post score -= 929;" + score); + } + } + } + } + return score; + } + + private Double tokenEntryPosScoring(Double + score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { + long param1 = (long) data.get("tokenEntryPosScoring_param1"); + long param2 = (long) data.get("tokenEntryPosScoring_param2"); + long param3 = (long) data.get("tokenEntryPosScoring_param3"); + long param4 = (long) data.get("tokenEntryPosScoring_param4"); + if (cacheSentimentLocal1.size() > param1 && cacheSentimentLocal2.size() > param1) { + for (String strTokenPos1 : cacheSentimentLocal1) { + for (String strTokenPos2 : cacheSentimentLocal2) { + if (strTokenPos1.equals(strTokenPos2)) { + score += param2; + } + } + } + int posEntrySize1 = cacheSentimentLocal1.size(); + int posEntrySize2 = cacheSentimentLocal2.size(); + if (posEntrySize1 * param3 > posEntrySize2 && posEntrySize2 * param3 > posEntrySize1) { + score += posEntrySize1 > posEntrySize2 ? 
(posEntrySize1 - posEntrySize2) * param4 : (posEntrySize2 - posEntrySize1) * param4; + } + } + return score; + } + + private Double unmarkedPatternCounterScoring(Double score, int UnmarkedPatternCounter1, + int UnmarkedPatternCounter2) { + //logger.info("UnmarkedPatternCounter1: " + UnmarkedPatternCounter1); + //logger.info("UnmarkedPatternCounter2: " + UnmarkedPatternCounter2); + long param1 = (long) data.get("unmarkedPatternCounterScoring_param1"); + long param2 = (long) data.get("unmarkedPatternCounterScoring_param2"); + long param3 = (long) data.get("unmarkedPatternCounterScoring_param3"); + long param4 = (long) data.get("unmarkedPatternCounterScoring_param4"); + long param5 = (long) data.get("unmarkedPatternCounterScoring_param5"); + long param6 = (long) data.get("unmarkedPatternCounterScoring_param6"); + long param7 = (long) data.get("unmarkedPatternCounterScoring_param7"); + long param8 = (long) data.get("unmarkedPatternCounterScoring_param8"); + if (UnmarkedPatternCounter1 > param1 && UnmarkedPatternCounter2 > param1) { + if (UnmarkedPatternCounter1 < param2 && UnmarkedPatternCounter2 < param2) { + if (UnmarkedPatternCounter1 * param3 > UnmarkedPatternCounter2 && UnmarkedPatternCounter2 * param3 > UnmarkedPatternCounter1) { + score += param4; + } else if (UnmarkedPatternCounter1 * param5 < UnmarkedPatternCounter2 || UnmarkedPatternCounter2 * param5 < UnmarkedPatternCounter1) { + score -= param6; + } + } else if (UnmarkedPatternCounter1 > param7 && UnmarkedPatternCounter2 > param7) { + score -= UnmarkedPatternCounter1 >= UnmarkedPatternCounter2 ? UnmarkedPatternCounter1 * param8 : UnmarkedPatternCounter2 * param8; + } + } + return score; + } + + private Double markedContiniousCounterScoring(Double score, int MarkedContinuousCounter1, + int MarkedContinuousCounter2) { + long param1 = (long) data.get("markedContiniousCounterScoring_param1"); + long param2 = (long) data.get("markedContiniousCounterScoring_param2"); + long param3 = (long) data.get("markedContiniousCounterScoring_param3"); + long param4 = (long) data.get("markedContiniousCounterScoring_param4"); + long param5 = (long) data.get("markedContiniousCounterScoring_param5"); + long param6 = (long) data.get("markedContiniousCounterScoring_param6"); + long param7 = (long) data.get("markedContiniousCounterScoring_param7"); + long param8 = (long) data.get("markedContiniousCounterScoring_param8"); + long param9 = (long) data.get("markedContiniousCounterScoring_param9"); + long param10 = (long) data.get("markedContiniousCounterScoring_param10"); + long param11 = (long) data.get("markedContiniousCounterScoring_param11"); + int MarkedContiniousCounter1Entries = MarkedContinuousCounter1; + int MarkedContiniousCounter2Entries = MarkedContinuousCounter2; + if (MarkedContinuousCounter1 > param1 && MarkedContinuousCounter2 > param1) { + if (MarkedContinuousCounter1 > MarkedContinuousCounter2 * param2 || MarkedContinuousCounter2 > MarkedContinuousCounter1 * param2) { + score -= MarkedContinuousCounter1 > MarkedContinuousCounter2 ? 
MarkedContinuousCounter1 * param3 : MarkedContinuousCounter2 * param3; + } else if (!Objects.equals(MarkedContiniousCounter1Entries, MarkedContiniousCounter2Entries) + && (MarkedContinuousCounter1 * param4 >= (long) MarkedContinuousCounter2 * MarkedContinuousCounter1) + || (MarkedContinuousCounter2 * param4 >= (long) MarkedContinuousCounter1 * MarkedContinuousCounter2)) { + score += param5; + } else if (MarkedContiniousCounter1Entries == param6 || MarkedContiniousCounter2Entries == param6) { + score += MarkedContinuousCounter1 > MarkedContinuousCounter2 ? (MarkedContinuousCounter2 - MarkedContinuousCounter1) * param7 + : (MarkedContinuousCounter1 - MarkedContinuousCounter2) * param7; + } + if (MarkedContiniousCounter1Entries > param8 && MarkedContiniousCounter2Entries > param8 && MarkedContinuousCounter1 > param8 + && MarkedContinuousCounter2 > param8 && MarkedContinuousCounter1 < MarkedContinuousCounter2 * param9 + && MarkedContinuousCounter2 < MarkedContinuousCounter1 * param9) { + if (MarkedContiniousCounter1Entries > MarkedContiniousCounter2Entries * param10 + || MarkedContiniousCounter2Entries > MarkedContiniousCounter1Entries * param10 + || MarkedContiniousCounter1Entries * param10 < MarkedContinuousCounter1 + || MarkedContiniousCounter1Entries * param10 < MarkedContinuousCounter2 + || MarkedContiniousCounter2Entries * param10 < MarkedContinuousCounter1 + || MarkedContiniousCounter2Entries * param10 < MarkedContinuousCounter2) { + score -= MarkedContinuousCounter1 > MarkedContinuousCounter2 ? MarkedContinuousCounter1 * param11 : MarkedContinuousCounter2 * param11; + } + } + } + return score; + } + + private Double strTokensMapScoring(Double score, ArrayList cacheSentimentLocal1, + ArrayList cacheSentimentLocal2) { + ArrayList strtokensMap = new ArrayList(); + //logger.info("cacheSentimentLocal1 size: " + cacheSentimentLocal1.size()); + //logger.info("cacheSentimentLocal2 size: " + cacheSentimentLocal2.size()); + long param1 = (long) data.get("strTokensMapScoring_param1"); + long param2 = (long) data.get("strTokensMapScoring_param2"); + long param3 = (long) data.get("strTokensMapScoring_param3"); + long param4 = (long) data.get("strTokensMapScoring_param4"); + long param5 = (long) data.get("strTokensMapScoring_param5"); + long param6 = (long) data.get("strTokensMapScoring_param6"); + for (String strTokeniPart1 : cacheSentimentLocal1) { + for (String strTokeniPart2 : cacheSentimentLocal2) { + if (strTokeniPart1.equals(strTokeniPart2) && !strtokensMap.contains(strTokeniPart2)) { + strtokensMap.add(strTokeniPart2); + score += param1; + //logger.info("score post score += 3883; " + score); + } + } + } + int tokenIPartSize1 = cacheSentimentLocal1.size(); + int tokenIPartSize2 = cacheSentimentLocal2.size(); + int strTokenMapSize = strtokensMap.size(); + if (tokenIPartSize1 * param2 > tokenIPartSize2 && tokenIPartSize2 * param2 > tokenIPartSize1) { + score += tokenIPartSize1 > tokenIPartSize2 ? (tokenIPartSize1 - tokenIPartSize2) * param3 : (tokenIPartSize2 - tokenIPartSize1) * param3; + score += strTokenMapSize * param4; + } else if (tokenIPartSize1 > param5 && tokenIPartSize2 > param5) { + score -= tokenIPartSize1 > tokenIPartSize2 ? 
(tokenIPartSize1 - tokenIPartSize2) * param6 : (tokenIPartSize2 - tokenIPartSize1) * param6; + } + return score; + } + + private Double strTokenEntryScoring(Double score, ArrayList cacheSentimentLocal1, + ArrayList cacheSentimentLocal2) { + long param1 = (long) data.get("strTokenEntryScoring_param1"); + long param2 = (long) data.get("strTokenEntryScoring_param2"); + long param3 = (long) data.get("strTokenEntryScoring_param3"); + long param4 = (long) data.get("strTokenEntryScoring_param4"); + long param5 = (long) data.get("strTokenEntryScoring_param5"); + long param6 = (long) data.get("strTokenEntryScoring_param6"); + long param7 = (long) data.get("strTokenEntryScoring_param7"); + long param8 = (long) data.get("strTokenEntryScoring_param8"); + long param9 = (long) data.get("strTokenEntryScoring_param9"); + long param10 = (long) data.get("strTokenEntryScoring_param10"); + long param11 = (long) data.get("strTokenEntryScoring_param11"); + int tokenEntry1 = cacheSentimentLocal1.size(); + int tokenEntry2 = cacheSentimentLocal2.size(); + boolean boundaryLeaks = false; + int remnantCounter = 0; + if (tokenEntry1 * param1 != tokenEntry2 && tokenEntry2 * param1 != tokenEntry1) { + boundaryLeaks = true; + } + ArrayList entryTokenMap = new ArrayList(); + for (String strTokenEntry1 : cacheSentimentLocal1) { + for (String strTokenEntry2 : cacheSentimentLocal2) { + if (!entryTokenMap.contains(strTokenEntry2)) { + if (strTokenEntry1.equals(strTokenEntry2)) { + score += boundaryLeaks ? param2 : param2 / param3; + //logger.info("score post score += boundaryLeaks ? 2500 : 2500 / 2; : " + score); + } else if (!boundaryLeaks) { + score -= param4; + //logger.info("score post score -= 450;: " + score); + } else { + remnantCounter++; + } + } + entryTokenMap.add(strTokenEntry2); + } + } + //logger.info("tokenEntry2: " + tokenEntry2); + //logger.info("tokenEntry1: " + tokenEntry1); + //logger.info("remnantCounter: " + remnantCounter); + //logger.info("boundaryLeaks: " + boundaryLeaks); + if (tokenEntry1 == param5) { + score -= param6; + //logger.info("score post score -= 9453;: " + score); + } + if (remnantCounter == param7) { + score -= param8; + //logger.info("score post score -= 4083;: " + score); + } + if (tokenEntry2 == param9) { + score += param10; + //logger.info("score score += 4563; " + score); + } + score += remnantCounter * param11; + //logger.info("score post score += remnantCounter * 250;: " + score); + return score; + } + + private Double strTokenMapTagsScoring(Double + score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { + ArrayList iTokenMapTagsMap = new ArrayList(); + long param1 = (long) data.get("strTokenMapTagsScoring_param1"); + long param2 = (long) data.get("strTokenMapTagsScoring_param2"); + long param3 = (long) data.get("strTokenMapTagsScoring_param3"); + long param4 = (long) data.get("strTokenMapTagsScoring_param4"); + long param5 = (long) data.get("strTokenMapTagsScoring_param5"); + long param6 = (long) data.get("strTokenMapTagsScoring_param6"); + for (String strmapTag : cacheSentimentLocal1) { + for (String strmapTag1 : cacheSentimentLocal2) { + if (strmapTag.equals(strmapTag1) && !iTokenMapTagsMap.contains(strmapTag1)) { + score -= param1; + iTokenMapTagsMap.add(strmapTag); + } + } + } + int mapTagsize1 = cacheSentimentLocal1.size(); + int mapTagsize2 = cacheSentimentLocal2.size(); + int tokenTagMapSize = iTokenMapTagsMap.size(); + if (mapTagsize1 != param2 && mapTagsize2 != param2) { + if (mapTagsize1 * param3 > mapTagsize2 && mapTagsize2 * param3 > mapTagsize1) { + 
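// the two tag lists are of comparable size: add the scaled size gap and the scaled overlap count +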
score += mapTagsize1 > mapTagsize2 ? (mapTagsize1 - mapTagsize2) * param4 : (mapTagsize2 - mapTagsize1) * param4; + score += tokenTagMapSize * param5; + } else { + score -= mapTagsize1 > mapTagsize2 ? (mapTagsize1 - mapTagsize2) * param6 : (mapTagsize2 - mapTagsize1) * param6; + } + } + return score; + } + + private Double tokenformSizeScoring(Double + score, ArrayList cacheSentimentLocal1, ArrayList cacheSentimentLocal2) { + long param1 = (long) data.get("tokenformSizeScoring_param1"); + long param2 = (long) data.get("tokenformSizeScoring_param2"); + long param3 = (long) data.get("tokenformSizeScoring_param3"); + long param4 = (long) data.get("tokenformSizeScoring_param4"); + long param5 = (long) data.get("tokenformSizeScoring_param5"); + long param6 = (long) data.get("tokenformSizeScoring_param6"); + long param7 = (long) data.get("tokenformSizeScoring_param7"); + long param8 = (long) data.get("tokenformSizeScoring_param8"); + long param9 = (long) data.get("tokenformSizeScoring_param9"); + int tokenform1size = cacheSentimentLocal1.size(); + int tokenform2size = cacheSentimentLocal2.size(); + if (tokenform1size > param1 || tokenform2size > param1) { + if (tokenform1size < tokenform2size * param2 && tokenform2size < tokenform1size * param2) { + for (String strTokenForm1itr1 : cacheSentimentLocal1) { + for (String strTokenForm1itr2 : cacheSentimentLocal2) { + if (strTokenForm1itr1.equals(strTokenForm1itr2)) { + score -= param3; + } else { + score += param4; + } + } + } + } else if (tokenform1size > param5 && tokenform2size > param5) { + if (tokenform1size * param6 >= tokenform2size && tokenform2size * param6 >= tokenform1size) { + score += tokenform1size > tokenform2size ? tokenform1size * param7 : tokenform2size * param7; + } else if (tokenform1size * param8 <= tokenform2size || tokenform2size * param8 <= tokenform1size) { + score -= tokenform1size > tokenform2size ? 
(tokenform1size - tokenform2size) * param9 : (tokenform2size - tokenform1size) * param9;
+                }
+            }
+        }
+        return score;
+    }
+
+    private Double tokenStemmingMapScoring(Double score, ArrayList<String> cacheSentimentLocal1, ArrayList<String> cacheSentimentLocal2) {
+        long param1 = (long) data.get("tokenStemmingMapScoring_param1");
+        ArrayList<String> tokenStemmingMap = new ArrayList<>();
+        //reward each distinct stem the two sentences have in common.
+        for (String strTokenStem : cacheSentimentLocal1) {
+            for (String strTokenStem1 : cacheSentimentLocal2) {
+                if (strTokenStem.equals(strTokenStem1) && !tokenStemmingMap.contains(strTokenStem)) {
+                    score += param1;
+                    tokenStemmingMap.add(strTokenStem);
+                }
+            }
+        }
+        return score;
+    }
+
+    private Double inflectedCounterScoring(Double score, int inflectedCounterPositive1,
+                                           int inflectedCounterPositive2,
+                                           int inflectedCounterNegative1, int inflectedCounterNegative2) {
+        long param1 = (long) data.get("inflectedCounterScoring_param1");
+        long param2 = (long) data.get("inflectedCounterScoring_param2");
+        long param3 = (long) data.get("inflectedCounterScoring_param3");
+        long param4 = (long) data.get("inflectedCounterScoring_param4");
+        long param5 = (long) data.get("inflectedCounterScoring_param5");
+        long param6 = (long) data.get("inflectedCounterScoring_param6");
+        long param7 = (long) data.get("inflectedCounterScoring_param7");
+        long param8 = (long) data.get("inflectedCounterScoring_param8");
+        long param9 = (long) data.get("inflectedCounterScoring_param9");
+        long param10 = (long) data.get("inflectedCounterScoring_param10");
+        long param11 = (long) data.get("inflectedCounterScoring_param11");
+        //absolute difference between the two negative-inflection counts.
+        int inflectedCounterNegative = inflectedCounterNegative1 > inflectedCounterNegative2
+                ? inflectedCounterNegative1 - inflectedCounterNegative2
+                : inflectedCounterNegative2 - inflectedCounterNegative1;
+        if ((inflectedCounterPositive1 + inflectedCounterPositive2) > inflectedCounterNegative && inflectedCounterNegative > param1) {
+            score += ((inflectedCounterPositive1 + inflectedCounterPositive2) - inflectedCounterNegative) * param2;
+        }
+        if (inflectedCounterPositive1 > param3 && inflectedCounterPositive2 > param3) {
+            if (inflectedCounterPositive1 * param4 > inflectedCounterPositive2 && inflectedCounterPositive2 * param4 > inflectedCounterPositive1
+                    && inflectedCounterNegative > param5) {
+                score += ((inflectedCounterPositive1 + inflectedCounterPositive2) * param6) - (inflectedCounterNegative * param7);
+            } else if (inflectedCounterPositive1 * param8 < inflectedCounterPositive2 || inflectedCounterPositive2 * param8 < inflectedCounterPositive1) {
+                score -= inflectedCounterPositive1 > inflectedCounterPositive2 ? (inflectedCounterPositive1 - inflectedCounterPositive2) * param9
+                        : (inflectedCounterPositive2 - inflectedCounterPositive1) * param9;
+            }
+        }
+        if (inflectedCounterPositive1 >= param10 && inflectedCounterPositive2 >= param10) {
+            score -= param11;
+        }
+        return score;
+    }
+
+    private Double annotatorCountScoring(Double score, int anotatorcounter1, int anotatorcounter2) {
+        long param1 = (long) data.get("annotatorCountScoring_param1");
+        long param2 = (long) data.get("annotatorCountScoring_param2");
+        long param3 = (long) data.get("annotatorCountScoring_param3");
+        long param4 = (long) data.get("annotatorCountScoring_param4");
+        long param5 = (long) data.get("annotatorCountScoring_param5");
+        if (anotatorcounter1 > param1 && anotatorcounter2 > param1) {
+            if (anotatorcounter1 * param2 > anotatorcounter2 && anotatorcounter2 * param2 > anotatorcounter1) {
+                score += anotatorcounter1 > anotatorcounter2 ?
(anotatorcounter1 - anotatorcounter2) * param3 + : (anotatorcounter2 - anotatorcounter1) * param3; + } else if (anotatorcounter1 * param4 < anotatorcounter2 || anotatorcounter2 * param4 < anotatorcounter1) { + score -= anotatorcounter1 > anotatorcounter2 ? (anotatorcounter1 - anotatorcounter2) * param5 : (anotatorcounter2 - anotatorcounter1) * param5; + } + } + return score; + } + + public Double tokensCounterScoring(Double score, int tokensCounter1, int tokensCounter2) { + int param1 = (int) data.get("tokensCounterScoring_param1"); + int param2 = (int) data.get("tokensCounterScoring_param2"); + int param3 = (int) data.get("tokensCounterScoring_param3"); + int param4 = (int) data.get("tokensCounterScoring_param4"); + int param5 = (int) data.get("tokensCounterScoring_param5"); + int param6 = (int) data.get("tokensCounterScoring_param6"); + int param7 = (int) data.get("tokensCounterScoring_param7"); + int param8 = (int) data.get("tokensCounterScoring_param8"); + int param9 = (int) data.get("tokensCounterScoring_param9"); + int param10 = (int) data.get("tokensCounterScoring_param10"); + int param11 = (int) data.get("tokensCounterScoring_param11"); + int param12 = (int) data.get("tokensCounterScoring_param12"); + int param13 = (int) data.get("tokensCounterScoring_param13"); + int param14 = (int) data.get("tokensCounterScoring_param14"); + + int param15 = (int) data.get("tokensCounterScoring_param15"); + int param16 = (int) data.get("tokensCounterScoring_param16"); + int param17 = (int) data.get("tokensCounterScoring_param17"); + int param18 = (int) data.get("tokensCounterScoring_param18"); + int param19 = (int) data.get("tokensCounterScoring_param19"); + int param20 = (int) data.get("tokensCounterScoring_param20"); + + if ((tokensCounter1 > param1 && tokensCounter2 > param1) && tokensCounter1 < tokensCounter2 * param2 && tokensCounter2 < tokensCounter1 * param2) { + if (tokensCounter1 > tokensCounter2 / param3 && tokensCounter2 > tokensCounter1 / param3 && tokensCounter1 < param4 && tokensCounter2 < param4) { + score += (tokensCounter1 + tokensCounter2) * param5; + //logger.info("score post score += (tokensCounter1 + tokensCounter2) * 500;" + score); + } else { + score -= param6; + //logger.info("Score post score -= 503; :" + score); + } + } else { + int elseint = (int) (tokensCounter1 >= tokensCounter2 ? (tokensCounter1 - tokensCounter2) * param7 : (tokensCounter2 - tokensCounter1) * param7); + if ((tokensCounter1 > tokensCounter2 * param8 || tokensCounter2 > tokensCounter1 * param8) + && tokensCounter1 > param9 && tokensCounter2 > param9) { + score -= tokensCounter1 > tokensCounter2 ? (tokensCounter1 - tokensCounter2) * param10 : (tokensCounter2 - tokensCounter1) * param10; + //logger.info("Score post score -= tokensCounter1 > tokensCounter2 ? 
(tokensCounter1 - tokensCounter2) * 500 : (tokensCounter2 - tokensCounter1) * 500; :" + score); + } else if (elseint > param11 && tokensCounter1 > param11 && tokensCounter2 > param11) { + if (elseint > param12 && elseint < param13) { + score += elseint * param14; + //logger.info("Score post score += elseint * 35; :" + score); + } else { + score -= elseint * param15; + //logger.info("Score post score -= elseint * 34; :" + score); + } + } else { + //logger.info("tokensCounter1: " + tokensCounter1); + //logger.info("tokensCounter2 " + tokensCounter2); + //logger.info("elseint: " + elseint); + if (tokensCounter1 == param16 && tokensCounter2 == param17 && elseint == param18) { + score += param19; + ////logger.info("score post : score += 3012;" + score); + } + int param21 = (int) data.get("tokensCounterScoring_param21"); + int param22 = (int) data.get("tokensCounterScoring_param22"); + int param23 = (int) data.get("tokensCounterScoring_param23"); + int param24 = (int) data.get("tokensCounterScoring_param24"); + int param25 = (int) data.get("tokensCounterScoring_param25"); + int param26 = (int) data.get("tokensCounterScoring_param26"); + int param27 = (int) data.get("tokensCounterScoring_param27"); + if (elseint > param20 && elseint < param21 && (tokensCounter1 > param22 || tokensCounter2 > param22) && tokensCounter1 <= param23 + && tokensCounter2 <= param23) { + score += elseint * param24; + //logger.info("Score post score += elseint * 1.5; :" + score); + } else if (elseint == param25 && tokensCounter1 == param25 && tokensCounter2 == param25) { + score += param26; + //logger.info("score post score += 3064; : " + score); + } else if (tokensCounter1 < param27 && tokensCounter2 < param27) { + score -= elseint * (tokensCounter1 + tokensCounter2); + //logger.info("Score post score -= elseint * (tokensCounter1 + tokensCounter2); :" + score); + } + } + } + return score; + } + + private Double nerEntitiesAndTokenScoring(Double score, ArrayList entityTokenTags1, + ArrayList entityTokenTags2, ArrayList nerEntities1, + ArrayList nerEntities2) { + long param1 = (long) data.get("nerEntitiesAndTokenScoring_param1"); + long param2 = (long) data.get("nerEntitiesAndTokenScoring_param2"); + List entsCollection = new ArrayList(); + for (String strEnts1 : nerEntities1) { + for (String strEnts2 : nerEntities2) { + if (strEnts1.equalsIgnoreCase(strEnts2) && !entsCollection.contains(strEnts1)) { + score += param1; + entsCollection.add(strEnts1); + } + } + } + entsCollection = new ArrayList(); + for (String strToken : entityTokenTags1) { + for (String strToken1 : entityTokenTags2) { + if (strToken.equalsIgnoreCase(strToken1) && !entsCollection.contains(strToken)) { + score += param2; + entsCollection.add(strToken); + } + } + } + return score; + } + + public Double stopWordTokenLemmaScoring(Double score, ArrayList stopWordToken1, + ArrayList stopWordToken2, ArrayList stopWordLemma1, + ArrayList stopWordLemma2) { + Collection stopWordCollection = new ArrayList(); + long param1 = (long) data.get("stopWordTokenLemmaScoring_param1"); + long param2 = (long) data.get("stopWordTokenLemmaScoring_param2"); + long param3 = (long) data.get("stopWordTokenLemmaScoring_param3"); + long param4 = (long) data.get("stopWordTokenLemmaScoring_param4"); + long param5 = (long) data.get("stopWordTokenLemmaScoring_param5"); + long param6 = (long) data.get("stopWordTokenLemmaScoring_param6"); + long param7 = (long) data.get("stopWordTokenLemmaScoring_param7"); + long param8 = (long) data.get("stopWordTokenLemmaScoring_param8"); + long param9 = 
(long) data.get("stopWordTokenLemmaScoring_param9"); + long param10 = (long) data.get("stopWordTokenLemmaScoring_param10"); + long param11 = (long) data.get("stopWordTokenLemmaScoring_param11"); + long param12 = (long) data.get("stopWordTokenLemmaScoring_param12"); + long param13 = (long) data.get("stopWordTokenLemmaScoring_param13"); + long param14 = (long) data.get("stopWordTokenLemmaScoring_param14"); + long param15 = (long) data.get("stopWordTokenLemmaScoring_param15"); + long param16 = (long) data.get("stopWordTokenLemmaScoring_param16"); + //logger.info("stopWordToken1 size: " + stopWordToken1.size()); + //logger.info("stopWordToken2 size: " + stopWordToken2.size()); + //logger.info("stopWordLemma1 size: " + stopWordLemma1.size()); + //logger.info("stopWordLemma2 size: " + stopWordLemma2.size()); + if (stopWordLemma2.size() >= param1) { + score -= param2; + //logger.info("score post score -= 9304"); + } + if (stopWordLemma1.size() == param3 && stopWordLemma2.size() == param3 && stopWordToken1.size() == param4 + && stopWordToken2.size() == param4) { + score += param5; + //logger.info("score post score += 7392;: " + score); + } + for (String stopwords1 : stopWordToken1) { + for (String stopwords2 : stopWordToken2) { + if (stopwords1.equals(stopwords2) && !stopWordCollection.contains(stopwords1)) { + if (stopWordToken1.size() == stopWordToken2.size() && stopWordLemma1.size() == stopWordToken2.size() && + stopWordLemma1.size() == param6 && stopWordToken1.size() == param6) { + score += param7; + //logger.info("score post score += 4103; : " + score); + } else if (stopWordLemma2.size() == param8 && stopWordLemma1.size() == param8 && stopWordToken1.size() == param8) { + score += param9; + //logger.info("score post score += 2345;: " + score); + } else if (stopWordLemma1.size() == param10 && stopWordToken1.size() == param11) { + score -= param12; + //logger.info("score postscore -= 8530;: " + score); + stopWordCollection.add(stopwords1); + } else if ((stopWordLemma1.size() >= param13 || stopWordLemma2.size() >= param13) + && (stopWordToken1.size() >= stopWordLemma1.size() || + stopWordToken2.size() >= stopWordLemma2.size())) { + score -= param14; + //logger.info("score post score -= 8654; :" + score); + //logger.info("stopWordLemma1.size(): " + stopWordLemma1.size()); + //logger.info("stopWordToken1.size(): " + stopWordToken1.size()); + //logger.info("stopWordLemma2.size(): " + stopWordLemma2.size()); + //logger.info("stopWordToken2.size(): " + stopWordToken2.size()); + } else if (stopWordLemma1.size() == param15 && stopWordToken1.size() == param15) { + score += param16; + //logger.info("score post score += 2479; : " + score); + } + } + } + } + long param17 = (long) data.get("stopWordTokenLemmaScoring_param17"); + stopWordCollection = new ArrayList(); + for (String stopwords1 : stopWordLemma1) { + for (String stopwords2 : stopWordLemma2) { + if (stopwords1.equals(stopwords2) && !stopWordCollection.contains(stopwords1)) { + score -= param17; + //logger.info("score post score -= 51; " + score); + stopWordCollection.add(stopwords1); + } + } + } + return score; + } + + private Double stopwordTokenPairCounterScoring(Double score, ArrayList stopWordToken1, + ArrayList stopWordToken2, int pairCounter1, + int pairCounter2) { + if (!stopWordToken1.isEmpty() && !stopWordToken2.isEmpty()) { + int stopwordsize1 = stopWordToken1.size(); + int stopwordsize2 = stopWordToken2.size(); + //logger.info("stopwordsize1: " + stopwordsize1); + //logger.info("stopwordsize2: " + stopwordsize2); + long param1 = (long) 
data.get("stopwordTokenPairCounterScoring_param1"); + long param2 = (long) data.get("stopwordTokenPairCounterScoring_param2"); + long param3 = (long) data.get("stopwordTokenPairCounterScoring_param3"); + long param4 = (long) data.get("stopwordTokenPairCounterScoring_param4"); + long param5 = (long) data.get("stopwordTokenPairCounterScoring_param5"); + long param6 = (long) data.get("stopwordTokenPairCounterScoring_param6"); + long param7 = (long) data.get("stopwordTokenPairCounterScoring_param7"); + long param8 = (long) data.get("stopwordTokenPairCounterScoring_param8"); + long param9 = (long) data.get("stopwordTokenPairCounterScoring_param9"); + long param10 = (long) data.get("stopwordTokenPairCounterScoring_param10"); + long param11 = (long) data.get("stopwordTokenPairCounterScoring_param11"); + if (stopwordsize1 * param1 < stopwordsize2 || stopwordsize2 * param1 < stopwordsize1) { + score -= stopwordsize1 > stopwordsize2 ? (stopwordsize1 - stopwordsize2) * param2 : (stopwordsize2 - stopwordsize1) * param2; + //logger.info("score post score -= stopwordsize1 > stopwordsize2 ? (stopwordsize1 - stopwordsize2) * 850 : (stopwordsize2 - stopwordsize1) * 850;: " + score); + } else if (stopwordsize1 == param3 && stopwordsize2 == param3) { + score -= param4; + //logger.info("score post score -= 7312;: " + score); + } else if (stopwordsize1 == stopwordsize2 && stopwordsize1 > param5) { + score -= stopwordsize1 * param6; + //logger.info("score post score -= stopwordsize1 * 450;: " + score); + } else if ((stopwordsize1 / param7 == stopwordsize2 || stopwordsize2 / param7 == stopwordsize1) && stopwordsize1 + stopwordsize2 >= param8) { + score -= param9; + //logger.info("score post score -= 2500;: " + score); + } else if (stopwordsize1 == param10 && stopwordsize2 == param10) { + score += param11; + //logger.info("score post score += 4513;: " + score); + } + } + long param12 = (long) data.get("stopwordTokenPairCounterScoring_param12"); + if (pairCounter1 > param12 && pairCounter2 > param12) { + //logger.info("pairCounter1: " + pairCounter1); + //logger.info("pairCounter2: " + pairCounter2); + long param13 = (long) data.get("stopwordTokenPairCounterScoring_param13"); + long param14 = (long) data.get("stopwordTokenPairCounterScoring_param14"); + if (pairCounter1 == param13 && pairCounter2 == param13) { + score -= param14; + //logger.info("score post score -= 2554;: " + score); + } + long param15 = (long) data.get("stopwordTokenPairCounterScoring_param15"); + long param17 = (long) data.get("stopwordTokenPairCounterScoring_param17"); + long param18 = (long) data.get("stopwordTokenPairCounterScoring_param18"); + long param19 = (long) data.get("stopwordTokenPairCounterScoring_param19"); + long param20 = (long) data.get("stopwordTokenPairCounterScoring_param20"); + long param21 = (long) data.get("stopwordTokenPairCounterScoring_param21"); + long param22 = (long) data.get("stopwordTokenPairCounterScoring_param22"); + long param23 = (long) data.get("stopwordTokenPairCounterScoring_param23"); + if (pairCounter1 * param15 <= pairCounter2 || pairCounter2 * param15 <= pairCounter1) { + long param16 = (long) data.get("stopwordTokenPairCounterScoring_param16"); + score -= pairCounter1 > pairCounter2 ? (pairCounter1 - pairCounter2) * param16 : (pairCounter2 - pairCounter1) * param16; + //logger.info("score post score -= pairCounter1 > pairCounter2 ? 
(pairCounter1 - pairCounter2) * 1500 : (pairCounter2 - pairCounter1) * 1500;: " + score); + } else if (pairCounter1 == pairCounter2 && pairCounter1 > param17) { + score -= pairCounter1 * param18; + //logger.info("score post score -= pairCounter1 * 450; : " + score); + } else if ((pairCounter1 / param19 == pairCounter2 || pairCounter2 / param19 == pairCounter1) && pairCounter1 + pairCounter2 >= param20) { + score -= param21; + //logger.info("score post score -= 2500;: " + score); + } else if (pairCounter1 == param22 && pairCounter2 == param22) { + score -= param23; + //logger.info("score post score -= 3112; " + score); + } + } + return score; + } + + private Double tgwListScoreIncrementer(Double score, ArrayList tgwListIndex1, + ArrayList tgwListIndex2) { + long param1 = (long) data.get("tgwListScoreIncrementer_param1"); + int runCount = 0; + for (String taggedWord : tgwListIndex1) { + boolean found = false; + for (String taggedWord1 : tgwListIndex2) { + if (taggedWord.equals(taggedWord1)) { + found = true; + break; + } + } + if (!found) { + runCount++; + } + } + score += runCount * param1; + return score; + } + + private List> getIMWES(List coreMaps) { + List> tokenList = new ArrayList<>(); + for (CoreMap sentence : coreMaps) { + List> imwes = sentence.get(JMWEAnnotation.class); + tokenList.addAll(imwes); + } + return tokenList; + } + + private int getInflictedCounterPositive(List> imwesFLocal) { + int InflectedCounterPositive = 0; + for (IMWE token : imwesFLocal) { + if (token.isInflected()) { + InflectedCounterPositive++; + } + } + return InflectedCounterPositive; + } + + private int getUnmarkedPatterns(List> imwesFLocal) { + int unmarked = 0; + for (IMWE token : imwesFLocal) { + IMWEDesc entry = token.getEntry(); + unmarked += entry.getUnmarkedPattern(); + } + return unmarked; + } + + private ArrayList gettokenForms(List> imwesFLocal) { + ArrayList arrs = new ArrayList<>(); + for (IMWE token : imwesFLocal) { + String form = token.getForm(); + arrs.add(form); + } + return arrs; + } + + private ArrayList getStrtokenEntryPos(List> imwesFLocal) { + ArrayList arrs = new ArrayList<>(); + for (IMWE token : imwesFLocal) { + IMWEDesc entry = token.getEntry(); + for (String strPostPrefix : entry.getPOS().getPrefixes()) { + arrs.add(strPostPrefix); + } + } + return arrs; + } + + private ArrayList getintTokenEntyCounts(List> imwesFLocal) { + ArrayList arrs = new ArrayList<>(); + for (IMWE token : imwesFLocal) { + IMWEDesc entry = token.getEntry(); + for (int counts : entry.getCounts()) { + arrs.add(counts); + } + } + return arrs; + } + + private ArrayList getITokenTags(List> imwesFLocal) { + ArrayList arrs = new ArrayList<>(); + for (IMWE token : imwesFLocal) { + for (IToken tokens : token.getTokens()) { + arrs.add(tokens.getTag()); + } + } + return arrs; + } + + private ArrayList getstrTokenStems(List> imwesFLocal) { + ArrayList arrs = new ArrayList<>(); + for (IMWE token : imwesFLocal) { + for (IToken tokens : token.getTokens()) { + for (String strtoken : tokens.getStems()) { + arrs.add(strtoken); + } + } + } + return arrs; + } + + private Integer getAnotatorcounter(List> imwesFLocal) { + return imwesFLocal.size(); + } + + private Integer getTokensCounter(List> imwesFLocal){ + int tokenSize = 0; + for(IMWE iTokenIMWE : imwesFLocal) + { + tokenSize += iTokenIMWE.getTokens().size(); + } + return tokenSize; + } + + private ArrayList getnerEntities(CoreDocument coreDocument) { + if (coreDocument == null || coreDocument.entityMentions() == null) { + return new ArrayList(); + } + ArrayList arrs = new 
ArrayList<>(); + for (CoreEntityMention em : coreDocument.entityMentions()) { + if (!arrs.contains(em.text())) { + arrs.add(em.text()); + } + } + return arrs; + } + + private ArrayList getnerEntitiesType(CoreDocument coreDocument) { + if (coreDocument == null || coreDocument.entityMentions() == null) { + return new ArrayList(); + } + ArrayList arrs = new ArrayList<>(); + for (CoreEntityMention em : coreDocument.entityMentions()) { + if (!arrs.contains(em.entityType())) { + arrs.add(em.entityType()); + } + } + return arrs; + } + + private Integer getPairCounter(Annotation pipelineAnnotationSentiment) { + int counter = 0; + List tokensSentiment = pipelineAnnotationSentiment. + get(CoreAnnotations.TokensAnnotation.class); + for (CoreLabel token : tokensSentiment) { + Pair stopword = token.get(StopwordAnnotator.class); + if (stopword.first() && stopword.second()) { + counter++; + } + } + return counter; + } + + private ArrayList getstopWordLemma(Annotation pipelineAnnotationSentiment) { + ArrayList arrs = new ArrayList<>(); + List tokensSentiment = pipelineAnnotationSentiment. + get(CoreAnnotations.TokensAnnotation.class); + String customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for," + + "if,in,into,is,it,no,not,of,on,or,such,that,the,their,then,there,these," + + "they,this,to,was,will,with,which,who,www,http,https,com,net,org,up,down,in,out,said," + + "because,although,while,since,until,after,before,over,under,through,between," + + "can,could,would,should,may,might,will"; + for (CoreLabel token : tokensSentiment) { + Set stopWords = StopAnalyzer.ENGLISH_STOP_WORDS_SET; + Set stopWordsCustom = StopwordAnnotator.getStopWordList(customStopWordList, true); + if (token.lemma() != null) { + String lemma = token.lemma().toLowerCase(); + if (stopWords.contains(lemma) || stopWordsCustom.contains(lemma)) { + arrs.add(lemma); + } + } + } + return arrs; + } + + private ArrayList getstopWordToken(Annotation pipelineAnnotationSentiment) { + ArrayList arrs = new ArrayList<>(); + List tokensSentiment = pipelineAnnotationSentiment. 
+ get(CoreAnnotations.TokensAnnotation.class); + String customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for," + + "if,in,into,is,it,no,not,of,on,or,such,that,the,their,then,there,these," + + "they,this,to,was,will,with"; + for (CoreLabel token : tokensSentiment) { + String word = token.word().toLowerCase(); + Set stopWords = StopAnalyzer.ENGLISH_STOP_WORDS_SET; + Set stopWordsCustom = StopwordAnnotator.getStopWordList(customStopWordList, true); + if (stopWords.contains(word) || stopWordsCustom.contains(word)) { + arrs.add(word); + } + } + return arrs; + } + + private ArrayList getentityTokenTags(CoreDocument coreDocument) { + if (coreDocument == null || coreDocument.entityMentions() == null) { + return new ArrayList(); + } + ArrayList arrs = new ArrayList<>(); + if (coreDocument != null) { + for (CoreEntityMention em : coreDocument.entityMentions()) { + List tokens = em.tokens(); + String entityType = em.entityType(); + Double EntityConfidences = 0.0; + Set> entrySet = em.entityTypeConfidences().entrySet(); + for (Map.Entry entries : entrySet) { + if (EntityConfidences < entries.getValue()) { + EntityConfidences = entries.getValue(); + } + } + for (CoreLabel token : tokens) { + if (token != null) { + if (!arrs.contains(token.tag())) { + if (entityType.equals("PERSON") && EntityConfidences > 0.80) { + arrs.add(token.tag()); + } + } + } + } + } + } + return arrs; + } + + private ArrayList getstrTokensIpartForm(List> imwesFLocal) { + ArrayList arrs = new ArrayList<>(); + for (IMWE token : imwesFLocal) { + Collection values = token.getPartMap().values(); + for (IMWEDesc.IPart iPart : values) { + String iPartForm = iPart.getForm(); + arrs.add(iPartForm); + } + } + return arrs; + } + + private int getMarkedCounter(List> imwesFLocal) { + int marked = 0; + for (IMWE token : imwesFLocal) { + IMWEDesc entry = token.getEntry(); + marked += entry.getMarkedContinuous(); + for (IToken tokens : token.getTokens()) { + marked += tokens.getStems().size(); + } + } + return marked; + } + + public int getInflictedCounterNegative(List> imwesFLocal) { + int InflectedCounterNegative = 0; + Collection> tokeninflectionMap = new ArrayList(); + for (IMWE token : imwesFLocal) { + if (!token.isInflected() && !tokeninflectionMap.contains(token)) { + InflectedCounterNegative++; + tokeninflectionMap.add(token); + } + } + return InflectedCounterNegative; + } + + public ArrayList getTokenEntries(List> imwesFLocal) { + ArrayList tokenStrList = new ArrayList<>(); + for (IMWE token : imwesFLocal) { + final String substring = token.getEntry().toString().substring(token.getEntry() + .toString().length() - 1); + tokenStrList.add(substring); + } + return tokenStrList; + } + + + public void validateStringCaches() { + + Class sentimentAnnotatedTreeClass = + SentimentCoreAnnotations.SentimentAnnotatedTree.class; + + if (this.tokenizeCountingF == null) { + this.tokenizeCountingF = tokenizeCounting(getTaggedWordList(str1)); + } + if (this.tokenizeCounting == null) { + this.tokenizeCounting = tokenizeCounting(getTaggedWordList(str)); + } + if (this.taggedWordListF == null) { + this.taggedWordListF = getTaggedWordList(str); + } + if (this.taggedWordList1 == null) { + this.taggedWordList1 = getTaggedWordList(str1); + } + if (this.retrieveTGWListF == null) { + this.retrieveTGWListF = retrieveTGWListIndex(this.taggedWordListF); + } + if (this.retrieveTGWList1 == null) { + this.retrieveTGWList1 = retrieveTGWListIndex(this.taggedWordList1); + } + if (this.sentencesF == null) { + this.sentencesF = 
pipelineAnnotation1.get(CoreAnnotations.SentencesAnnotation.class); + } + if (this.sentences1 == null) { + this.sentences1 = pipelineAnnotation2.get(CoreAnnotations.SentencesAnnotation.class); + } + if (this.sentencesSentimentF == null) { + this.sentencesSentimentF = pipelineAnnotation1Sentiment.get(CoreAnnotations.SentencesAnnotation.class); + } + if (this.sentencesSentiment1 == null) { + this.sentencesSentiment1 = pipelineAnnotation2Sentiment.get(CoreAnnotations.SentencesAnnotation.class); + } + if (this.treesF == null) { + this.treesF = retrieveTrees(this.sentencesF); + } + if (this.trees1 == null) { + this.trees1 = retrieveTrees(this.sentences1); + } + if (this.grammaticalStructuresF == null) { + this.grammaticalStructuresF = grammaticalStructureSetup(this.treesF); + } + if (this.grammaticalStructures1 == null) { + this.grammaticalStructures1 = grammaticalStructureSetup(this.trees1); + } + if (this.typedDependenciesF == null) { + this.typedDependenciesF = grammaticalStructureAllTypedDependencies(this.grammaticalStructuresF); + } + if (this.typedDependencies1 == null) { + this.typedDependencies1 = grammaticalStructureAllTypedDependencies(this.grammaticalStructures1); + } + if (this.rnnCoreAnnotationsPredictedF == null) { + this.rnnCoreAnnotationsPredictedF = sentimentRNNCoreAnnotations(this.sentencesSentimentF, + sentimentAnnotatedTreeClass); + } + if (this.rnnCoreAnnotationsPredicted1 == null) { + this.rnnCoreAnnotationsPredicted1 = sentimentRNNCoreAnnotations(this.sentencesSentiment1, + sentimentAnnotatedTreeClass); + } + if (this.simpleMatricesF == null) { + this.simpleMatricesF = sentimentRNNCorePredicted(this.sentencesSentimentF, sentimentAnnotatedTreeClass); + } + if (this.simpleMatrices1 == null) { + this.simpleMatrices1 = sentimentRNNCorePredicted(this.sentencesSentiment1, sentimentAnnotatedTreeClass); + } + if (this.simpleMatricesNodevectorsF == null) { + this.simpleMatricesNodevectorsF = sentimentRNNCoreNodevectors(this.sentencesSentimentF, sentimentAnnotatedTreeClass); + } + if (this.simpleMatricesNodevectors1 == null) { + this.simpleMatricesNodevectors1 = sentimentRNNCoreNodevectors(this.sentencesSentiment1, sentimentAnnotatedTreeClass); + } + if (this.listF == null) { + DocumentReaderAndWriter readerAndWriter = classifier.makePlainTextReaderAndWriter(); + this.listF = classifier.classifyRaw(str, readerAndWriter); + } + if (this.list1 == null) { + DocumentReaderAndWriter readerAndWriter = classifier.makePlainTextReaderAndWriter(); + this.list1 = classifier.classifyRaw(str1, readerAndWriter); + } + if (this.longestF == null) { + this.longestF = setupMainLongest(this.sentencesSentimentF); + } + if (this.longest1 == null) { + this.longest1 = setupMainLongest(this.sentencesSentiment1); + } + if (this.sentimentLongestF == null) { + this.sentimentLongestF = setupMainSentiment(this.sentencesSentimentF, sentimentAnnotatedTreeClass); + } + if (this.sentimentLongest1 == null) { + this.sentimentLongest1 = setupMainSentiment(this.sentencesSentiment1, sentimentAnnotatedTreeClass); + } + if (this.imwesF == null) { + this.imwesF = getIMWES(this.coreMaps1); + } + if (this.imwes1 == null) { + this.imwes1 = getIMWES(this.coreMaps2); + } + if (this.InflectedCounterNegativeF == null) { + this.InflectedCounterNegativeF = getInflictedCounterNegative(this.imwesF); + } + if (this.InflectedCounterNegative1 == null) { + this.InflectedCounterNegative1 = getInflictedCounterNegative(this.imwes1); + } + if (this.InflectedCounterPositiveF == null) { + this.InflectedCounterPositiveF = 
getInflictedCounterPositive(this.imwesF); + } + if (this.InflectedCounterPositive1 == null) { + this.InflectedCounterPositive1 = getInflictedCounterPositive(this.imwes1); + } + if (this.tokenEntryF == null) { + this.tokenEntryF = getTokenEntries(this.imwesF); + } + if (this.tokenEntry1 == null) { + this.tokenEntry1 = getTokenEntries(this.imwes1); + } + if (this.MarkedContinuousCounterF == null) { + this.MarkedContinuousCounterF = getMarkedCounter(this.imwesF); + } + if (this.MarkedContinuousCounter1 == null) { + this.MarkedContinuousCounter1 = getMarkedCounter(this.imwes1); + } + if (this.UnmarkedPatternCounterF == null) { + this.UnmarkedPatternCounterF = getUnmarkedPatterns(this.imwesF); + } + if (this.UnmarkedPatternCounter1 == null) { + this.UnmarkedPatternCounter1 = getUnmarkedPatterns(this.imwes1); + } + if (this.strTokensIpartFormF == null) { + this.strTokensIpartFormF = getstrTokensIpartForm(this.imwesF); + } + if (this.strTokensIpartForm1 == null) { + this.strTokensIpartForm1 = getstrTokensIpartForm(this.imwes1); + } + if (this.tokenFormsF == null) { + this.tokenFormsF = gettokenForms(this.imwesF); + } + if (this.tokenForms1 == null) { + this.tokenForms1 = gettokenForms(this.imwes1); + } + if (this.strTokenEntryGetPOSF == null) { + this.strTokenEntryGetPOSF = getStrtokenEntryPos(this.imwesF); + } + if (this.strTokenEntryGetPOS1 == null) { + this.strTokenEntryGetPOS1 = getStrtokenEntryPos(this.imwes1); + } + if (this.intTokenEntyCountsF == null) { + this.intTokenEntyCountsF = getintTokenEntyCounts(this.imwesF); + } + if (this.intTokenEntyCounts1 == null) { + this.intTokenEntyCounts1 = getintTokenEntyCounts(this.imwes1); + } + if (this.ITokenTagsF == null) { + this.ITokenTagsF = getITokenTags(this.imwesF); + } + if (this.ITokenTags1 == null) { + this.ITokenTags1 = getITokenTags(this.imwes1); + } + if (this.strTokenStemsF == null) { + this.strTokenStemsF = getstrTokenStems(this.imwesF); + } + if (this.strTokenStems1 == null) { + this.strTokenStems1 = getstrTokenStems(this.imwes1); + } + if (this.AnotatorcounterF == null) { + this.AnotatorcounterF = getAnotatorcounter(this.imwesF); + } + if (this.Anotatorcounter1 == null) { + this.Anotatorcounter1 = getAnotatorcounter(this.imwes1); + } + if (this.TokensCounterF == null) { + this.TokensCounterF = getTokensCounter(this.imwesF); + } + if (this.TokensCounter1 == null) { + this.TokensCounter1 = getTokensCounter(this.imwes1); + } + if (this.entityTokenTagsF == null) { + this.entityTokenTagsF = getentityTokenTags(this.pipelineCoreDcoument1); + } + if (this.entityTokenTags1 == null) { + this.entityTokenTags1 = getentityTokenTags(this.pipelineCoreDcoument2); + } + if (this.nerEntitiesF == null) { + this.nerEntitiesF = getnerEntities(this.pipelineCoreDcoument1); + } + if (this.nerEntities1 == null) { + this.nerEntities1 = getnerEntities(this.pipelineCoreDcoument2); + } + if (this.nerEntitiesTypeF == null) { + this.nerEntitiesTypeF = getnerEntitiesType(this.pipelineCoreDcoument1); + } + if (this.nerEntitiesType1 == null) { + this.nerEntitiesType1 = getnerEntitiesType(this.pipelineCoreDcoument2); + } + if (this.stopWordTokenF == null) { + this.stopWordTokenF = getstopWordToken(this.pipelineAnnotation1Sentiment); + } + if (this.stopWordToken1 == null) { + this.stopWordToken1 = getstopWordToken(this.pipelineAnnotation2Sentiment); + } + if (this.stopWordLemmaF == null) { + this.stopWordLemmaF = getstopWordLemma(this.pipelineAnnotation1Sentiment); + } + if (this.stopWordLemma1 == null) { + this.stopWordLemma1 = 
getstopWordLemma(this.pipelineAnnotation2Sentiment);
+        }
+        if (this.PairCounterF == null) {
+            this.PairCounterF = getPairCounter(this.pipelineAnnotation1Sentiment);
+        }
+        if (this.PairCounter1 == null) {
+            this.PairCounter1 = getPairCounter(this.pipelineAnnotation2Sentiment);
+        }
+    }
+
+
+    public Double callSMX() {
+
+        Double score = 0.0;
+
+        /*
+        try {
+            int index = new Random().nextInt(9);
+            fh = new FileHandler("E:/stationær backup filer/Projects/ArtificialAutism_intellij/logs/autismlog-" + index);
+            //logger.addHandler(fh);
+            SimpleFormatter formatter = new SimpleFormatter();
+            fh.setFormatter(formatter);
+        } catch (SecurityException e) {
+            e.printStackTrace();
+        } catch (IOException e) {
+            e.printStackTrace();
+        } */
+
+        //logger.info("Sent1: " + str);
+        //logger.info("Sent2: " + str1);
+        int counter1;
+        int counter2;
+        validateStringCaches();
+        counter1 = this.tokenizeCountingF;
+        counter2 = this.tokenizeCounting;
+        long overValue_param1 = (long) data.get("overValue_param1");
+        //penalise the absolute difference in token counts between the two sentences.
+        final int overValue = (int) ((counter1 >= counter2 ? counter1 - counter2 : counter2 - counter1) * overValue_param1);
+        score -= overValue;
+        //logger.info("score post overValue: " + score);
+        score = tgwListScoreIncrementer(score, this.retrieveTGWListF, this.retrieveTGWList1);
+        //logger.info("score post tgwListScoreIncrementer: " + score);
+        Class<SentimentCoreAnnotations.SentimentAnnotatedTree> sentimentAnnotatedTreeClass =
+                SentimentCoreAnnotations.SentimentAnnotatedTree.class;
+
+        score = iterateTrees(this.treesF, this.trees1, score);
+        //logger.info("\n\n \n \n \nscore post iterateTrees: " + score);
+        score = typeDependenciesGrammaticalRelation(this.typedDependenciesF, this.typedDependencies1, score, this.grammaticalStructuresF, this.grammaticalStructures1,
+                this.treesF, this.trees1);
+        //logger.info("score post typeDependenciesGrammaticalRelation: " + score);
+
+        score = simpleRNNMatrixCalculations(score, this.simpleMatricesF, this.simpleMatrices1);
+        //logger.info("score post simpleRNNMatrixCalculations: " + score);
+        score = simpleRNNMaxtrixVectors(score, this.simpleMatricesNodevectorsF, this.simpleMatricesNodevectors1);
+        //logger.info("score post simpleRNNMaxtrixVectors: " + score);
+        Integer sentiment1 = this.rnnCoreAnnotationsPredictedF.size();
+        Integer sentiment2 = this.rnnCoreAnnotationsPredicted1.size();
+
+        long param1_sentiment = (long) data.get("sentiment1_param1");
+
+        //subtract the absolute difference between the two predicted-sentiment counts.
+        score -= (sentiment1 < sentiment2 ?
sentiment2 - sentiment1 : sentiment1 - sentiment2) * param1_sentiment; + //logger.info("score post rnnCoreAnnotationsPredicted " + score); + int scoreClassifyraw = classifyRawEvaluation(); + //logger.info("scoreClassifyraw: " + scoreClassifyraw); + score -= scoreClassifyraw; + //logger.info("score post classifyRawEvaluation " + score); + score = sentimentMatrixVariances(score, this.longestF, this.longest1, this.sentimentLongestF, this.sentimentLongest1); + //logger.info("score post sentimentMatrixVariances " + score); + score = entryCountsRelation(score, this.intTokenEntyCountsF, this.intTokenEntyCounts1); + //logger.info("score post entryCountsRelation " + score); + score = entryCountsScoring(score, this.intTokenEntyCountsF, this.intTokenEntyCounts1); + //logger.info("score post entryCountsScoring " + score); + score = tokenEntryPosScoring(score, this.strTokenEntryGetPOSF, this.strTokenEntryGetPOS1); + //logger.info("score post tokenEntryPosScoring " + score); + score = unmarkedPatternCounterScoring(score, this.UnmarkedPatternCounterF, + this.UnmarkedPatternCounter1); + //logger.info("score post unmarkedPatternCounterScoring: " + score); + score = markedContiniousCounterScoring(score, this.MarkedContinuousCounterF, + this.MarkedContinuousCounter1); + //logger.info("score post markedContiniousCounterScoring " + score); + score = strTokensMapScoring(score, this.strTokensIpartFormF, this.strTokensIpartForm1); + //logger.info("score post strTokensMapScoring " + score); + score = strTokenEntryScoring(score, this.tokenEntryF, this.tokenEntry1); + //logger.info("score post strTokenEntryScoring " + score); + score = strTokenMapTagsScoring(score, this.ITokenTagsF, this.ITokenTags1); + //logger.info("score post strTokenMapTagsScoring " + score); + score = tokenformSizeScoring(score, this.tokenFormsF, this.tokenForms1); + //logger.info("score post tokenformSizeScoring " + score); + score = tokenStemmingMapScoring(score, this.strTokenStemsF, this.strTokenStems1); + //logger.info("score post tokenStemmingMapScoring " + score); + + score = inflectedCounterScoring(score, this.InflectedCounterPositiveF, this.InflectedCounterPositive1, + this.InflectedCounterNegativeF, this.InflectedCounterNegative1); + //logger.info("score post inflectedCounterScoring " + score); + score = annotatorCountScoring(score, this.AnotatorcounterF, this.Anotatorcounter1); + //logger.info("score post annotatorCountScoring " + score); + score = tokensCounterScoring(score, this.TokensCounterF, this.TokensCounter1); + //logger.info("score post tokensCounterScoring " + score); + LevenshteinDistance leven = new LevenshteinDistance(str, str1); + double SentenceScoreDiff = leven.computeLevenshteinDistance(); + long param1 = (long) data.get("SentenceScoreDiff_param1"); + SentenceScoreDiff *= param1; + score -= SentenceScoreDiff; + //logger.info("score post SentenceScoreDiff " + score); + score = nerEntitiesAndTokenScoring(score, this.entityTokenTagsF, this.entityTokenTags1, + this.nerEntitiesF, this.nerEntities1); + //logger.info("score post nerEntitiesAndTokenScoring " + score); + score = stopWordTokenLemmaScoring(score, this.stopWordTokenF, this.stopWordToken1, + this.stopWordLemmaF, this.stopWordLemma1); + //logger.info("score post stopWordTokenLemmaScoring " + score); + score = stopwordTokenPairCounterScoring(score, this.stopWordTokenF, this.stopWordToken1, + this.PairCounterF, this.PairCounter1); + //logger.info("score post stopwordTokenPairCounterScoring " + score); + + return score; + + } +} diff --git 
a/src/main/java/FunctionLayer/StopwordAnnotator.java b/src/main/java/FunctionLayer/StopwordAnnotator.java
new file mode 100644
index 0000000..b6df578
--- /dev/null
+++ b/src/main/java/FunctionLayer/StopwordAnnotator.java
@@ -0,0 +1,108 @@
+package FunctionLayer;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Properties;
+import java.util.Set;
+import java.util.Arrays;
+
+import edu.stanford.nlp.ling.CoreAnnotation;
+import edu.stanford.nlp.pipeline.Annotator;
+import org.apache.lucene.analysis.CharArraySet;
+import org.apache.lucene.analysis.core.StopAnalyzer;
+
+import edu.stanford.nlp.ling.*;
+import edu.stanford.nlp.pipeline.Annotation;
+import edu.stanford.nlp.util.Pair;
+import edu.stanford.nlp.util.ArraySet;
+
+/**
+ * CoreNLP annotator that checks whether an incoming token is a stopword
+ *
+ * @author John Conwell
+ * @author Paul Landes
+ */
+public class StopwordAnnotator implements Annotator, CoreAnnotation<Pair<Boolean, Boolean>> {
+
+    /**
+     * stopword annotator class name used in annotators property
+     */
+    public static final String ANNOTATOR_CLASS = "stopword";
+
+    /**
+     * Property key to specify the comma delimited list of custom stopwords
+     */
+    public static final String STOPWORDS_LIST = "stopword-list";
+
+    /**
+     * Property key to specify if stopword list is case insensitive
+     */
+    public static final String IGNORE_STOPWORD_CASE = "ignore-stopword-case";
+
+    private static Class<?> boolPair = Pair.makePair(true, true).getClass();
+
+    private Properties props;
+    private CharArraySet stopwords;
+
+    public StopwordAnnotator() {
+        this(new Properties());
+    }
+
+    public StopwordAnnotator(String notUsed, Properties props) {
+        this(props);
+    }
+
+    public StopwordAnnotator(Properties props) {
+        this.props = props;
+        if (this.props.containsKey(STOPWORDS_LIST)) {
+            String stopwordList = props.getProperty(STOPWORDS_LIST);
+            boolean ignoreCase = Boolean.parseBoolean(props.getProperty(IGNORE_STOPWORD_CASE, "false"));
+            this.stopwords = getStopWordList(stopwordList, ignoreCase);
+        } else {
+            this.stopwords = (CharArraySet) StopAnalyzer.ENGLISH_STOP_WORDS_SET;
+        }
+    }
+
+    @Override
+    public void annotate(Annotation annotation) {
+        if (stopwords != null && stopwords.size() > 0 && annotation.containsKey(CoreAnnotations.TokensAnnotation.class)) {
+            List<CoreLabel> tokens = annotation.get(CoreAnnotations.TokensAnnotation.class);
+            for (CoreLabel token : tokens) {
+                boolean isWordStopword = stopwords.contains(token.word().toLowerCase());
+                boolean isLemmaStopword = stopwords.contains(token.lemma().toLowerCase());
+                Pair<Boolean, Boolean> pair = Pair.makePair(isWordStopword, isLemmaStopword);
+                token.set(StopwordAnnotator.class, pair);
+            }
+        }
+    }
+
+    @Override
+    public Set<Class<? extends CoreAnnotation>> requirementsSatisfied() {
+        return Collections.singleton(StopwordAnnotator.class);
+    }
+
+    @Override
+    public Set<Class<? extends CoreAnnotation>> requires() {
+        return Collections.unmodifiableSet(new ArraySet<>(Arrays.asList(
+                CoreAnnotations.TextAnnotation.class,
+                CoreAnnotations.TokensAnnotation.class,
+                CoreAnnotations.LemmaAnnotation.class,
+                CoreAnnotations.PartOfSpeechAnnotation.class
+        )));
+    }
+
+    @Override
+    @SuppressWarnings("unchecked")
+    public Class<Pair<Boolean, Boolean>> getType() {
+        return (Class<Pair<Boolean, Boolean>>) boolPair;
+    }
+
+    public static CharArraySet getStopWordList(String stopwordList, boolean ignoreCase) {
+        String[] terms = stopwordList.split(",");
+        CharArraySet stopwordSet = new CharArraySet(terms.length, ignoreCase);
+        for (String term : terms) {
+            stopwordSet.add(term);
+        }
+        return CharArraySet.unmodifiableSet(stopwordSet);
+    }
+}
diff --git a/src/main/java/META-INF/MANIFEST.MF
b/src/main/java/META-INF/MANIFEST.MF new file mode 100644 index 0000000..3de0fd0 --- /dev/null +++ b/src/main/java/META-INF/MANIFEST.MF @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +Main-Class: PresentationLayer.DiscordHandler + diff --git a/src/main/java/PresentationLayer/DiscordHandler.java b/src/main/java/PresentationLayer/DiscordHandler.java new file mode 100644 index 0000000..69dd5f6 --- /dev/null +++ b/src/main/java/PresentationLayer/DiscordHandler.java @@ -0,0 +1,114 @@ +package PresentationLayer; + +import DataLayer.RunnerClient; +import DataLayer.ThreadClient; +import DataLayer.settings; +import FunctionLayer.Datahandler; +import FunctionLayer.PipelineJMWESingleton; +import edu.stanford.nlp.pipeline.StanfordCoreNLP; +import discord4j.core.event.domain.lifecycle.ReadyEvent; +import discord4j.core.event.domain.message.MessageCreateEvent; +import discord4j.core.object.entity.Message; +import net.dv8tion.jda.api.JDABuilder; +import net.dv8tion.jda.api.entities.Activity; +import net.dv8tion.jda.api.entities.Member; +import net.dv8tion.jda.api.events.message.MessageReceivedEvent; +import net.dv8tion.jda.api.hooks.ListenerAdapter; +import net.dv8tion.jda.api.requests.GatewayIntent; + +import javax.security.auth.login.LoginException; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.*; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Properties; +import java.util.concurrent.Executors; + + +/** + * @author install1 + */ +public class DiscordHandler extends ListenerAdapter { + private static StanfordCoreNLP stanfordCoreNLP; + private static Datahandler datahandler; + private static StanfordCoreNLP stanfordCoreNLPSentiment; + + public static void main(String[] args) { + datahandler = new Datahandler(); + PipelineJMWESingleton.getINSTANCE(); + stanfordCoreNLP = datahandler.pipeLineSetUp(); + stanfordCoreNLPSentiment = datahandler.shiftReduceParserInitiate(); + + System.out.println("FINISHED ALL ANNOTATIONS"); + String strF = datahandler.trimString("abcdef"); + datahandler.getResponseFutures(strF, stanfordCoreNLP, stanfordCoreNLPSentiment); + Properties prop = new Properties(); + String fileName = "app.config"; + try (FileInputStream fis = new FileInputStream(fileName)) { + prop.load(fis); + } catch (FileNotFoundException ex) { + } catch (IOException ex) { + } + String token = prop.getProperty("app.discordtoken"); + + JDABuilder.createLight(token, GatewayIntent.GUILD_MESSAGES, GatewayIntent.DIRECT_MESSAGES) + .addEventListeners(new DiscordHandler()) + .setActivity(Activity.playing("Being the autism bot")) + .build(); + new ThreadClient(datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment); + } + + + @Override + public void onMessageReceived(MessageReceivedEvent event) { + String content = event.getMessage().getContentRaw(); + String username = event.getMessage().getAuthor().getName(); + List mentionedMembers = event.getMessage().getMentions().getMembers(); + //List mentionedMembers = event.getMessage().getMentionedMembers(); + for (Member member : mentionedMembers) { + content = content.replace(member.getId(), ""); + } + if (username != null && !event.getAuthor().isBot() && !content.isEmpty() + && event.getMessage().getCategory() != null) { + String channelName = event.getMessage().getChannel().getName().toLowerCase(); + boolean channelpermissionsDenied = false; + if (channelName.contains("suggestion-box")) { + 
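// messages in the suggestion-box channel are never answered by the bot +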
channelpermissionsDenied = true; + } + String categoryName = event.getMessage().getCategory().getName().toLowerCase(); + switch (categoryName) { + case "public area": + case "information area": { + break; + } + default: { + channelpermissionsDenied = true; + break; + } + } + if (!channelpermissionsDenied) { + boolean mentionedBot = false; + if (mentionedMembers != null) { + for (Member member : mentionedMembers) { + if (member.getEffectiveName().equals(event.getJDA().getSelfUser().getName())) { + mentionedBot = true; + break; + } + } + } + final String contentF = content; + try { + new RunnerClient(contentF, mentionedBot, channelName, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment, + event, username); + } catch (Exception e) { + e.printStackTrace(); + System.exit(1); + } + } + } + } +} diff --git a/src/test/java/TestJunit.java b/src/test/java/TestJunit.java new file mode 100644 index 0000000..35dddc9 --- /dev/null +++ b/src/test/java/TestJunit.java @@ -0,0 +1,560 @@ +import DataLayer.DataMapper; +import DataLayer.testClasses; +import FunctionLayer.Datahandler; +import FunctionLayer.PipelineJMWESingleton; +import FunctionLayer.StanfordParser.SentimentAnalyzerTestDynamicTesting; +import edu.mit.jmwe.data.IMWE; +import edu.mit.jmwe.data.IToken; +import edu.stanford.nlp.ie.AbstractSequenceClassifier; +import edu.stanford.nlp.ie.crf.CRFClassifier; +import edu.stanford.nlp.ling.CoreAnnotations; +import edu.stanford.nlp.ling.CoreLabel; +import edu.stanford.nlp.ling.TaggedWord; +import edu.stanford.nlp.parser.lexparser.LexicalizedParser; +import edu.stanford.nlp.pipeline.Annotation; +import edu.stanford.nlp.pipeline.CoreDocument; +import edu.stanford.nlp.pipeline.StanfordCoreNLP; +import edu.stanford.nlp.tagger.maxent.MaxentTagger; +import edu.stanford.nlp.trees.*; +import edu.stanford.nlp.util.CoreMap; +import org.ejml.simple.SimpleMatrix; +import org.json.simple.JSONObject; +import org.json.simple.parser.JSONParser; +import org.json.simple.parser.ParseException; +import org.junit.Assert; +//import org.junit.Test; +import org.junit.jupiter.api.Test; + +import java.io.FileReader; +import java.io.IOException; +import java.text.DecimalFormat; +import java.text.DecimalFormatSymbols; +import java.util.*; + +public class TestJunit { + + private String taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words-distsim.tagger"; + private MaxentTagger tagger = new MaxentTagger(taggerPath); + private GrammaticalStructureFactory gsf = initiateGrammaticalStructureFactory(); + + private JSONParser parser = new JSONParser(); + private JSONObject dataFromJson = null; + + String nerModel = "edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz"; + AbstractSequenceClassifier classifier = CRFClassifier. + getClassifierNoExceptions(nerModel); + + public GrammaticalStructureFactory initiateGrammaticalStructureFactory() { + String lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz"; + LexicalizedParser lp = LexicalizedParser. 
+ loadModel(lexParserEnglishPCFG, "-maxLength", "100"); + TreebankLanguagePack langpack = lp.getOp().langpack(); + return langpack.grammaticalStructureFactory(); + } + + public Double testCall(testClasses testClass, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment, JSONObject dataRandom, + String tableTestType, boolean testAll, int indexCounter) { + String sent1 = testClass.getSentence1(); + String sent2 = testClass.getSentence2(); + + //System.out.println("sent1: " + sent1); + //System.out.println("sent2: " + sent2); + + ArrayList concurrentRelations = new ArrayList(); + Annotation jmweAnnotationF = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(sent1); + Annotation jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(sent2); + + Integer tokenizeCountingF = null; + List> taggedWordListF = null; + List> taggedWordList1 = null; + ArrayList retrieveTGWListF = null; + java.util.ArrayList retrieveTGWList1 = null; + List sentencesF = null; + List sentence1 = null; + List sentencesSentimentF = null; + List sentenceSentiment1 = null; + List coreMaps1 = jmweAnnotationF.get(CoreAnnotations.SentencesAnnotation.class); + ArrayList treesF = null; + ArrayList trees1 = null; + ArrayList grammaticalStructuresF = null; + ArrayList grammaticalStructures1 = null; + ArrayList typedDependenciesF = null; + ArrayList rnnCoreAnnotationsPredictedF = null; + ArrayList simpleMatricesF = null; + ArrayList simpleMatricesNodevectorsF = null; + ArrayList listF = null; + Integer longestF = null; + Integer sentimentLongestF = null; + List> imwesF = null; + Integer InflectedCounterNegativeF = null; + Integer InflectedCounterPositiveF = null; + ArrayList tokenEntryF = null; + Integer MarkedContinuousCounterF = null; + Integer UnmarkedPatternCounterF = null; + ArrayList strTokensIpartFormF = null; + ArrayList tokenFormsF = null; + ArrayList strTokenEntryGetPOSF = null; + ArrayList intTokenEntyCountsF = null; + ArrayList ITokenTagsF = null; + ArrayList strTokenStemsF = null; + Integer AnotatorcounterF = null; + Integer TokensCounterF = null; + ArrayList entityTokenTagsF = null; + ArrayList nerEntitiesF = null; + ArrayList nerEntitiesTypeF = null; + ArrayList stopWordTokenF = null; + ArrayList stopWordLemmaF = null; + Integer PairCounterF = null; + + java.util.ArrayList typedDependencies1 = null; + ArrayList rnnCoreAnnotationsPredicted1 = null; + ArrayList simpleMatrices1 = null; + ArrayList simpleMatricesNodevectors1 = null; + List list1 = null; + Integer longest1 = null; + Integer sentimentLongest1 = null; + List> imwes1 = null; + Integer InflectedCounterNegative1 = null; + Integer InflectedCounterPositive1 = null; + ArrayList tokenEntry1 = null; + Integer MarkedContinuousCounter1 = null; + Integer UnmarkedPatternCounter1 = null; + ArrayList strTokensIpartForm1 = null; + ArrayList tokenForms1 = null; + ArrayList strTokenEntryGetPOS1 = null; + ArrayList intTokenEntyCounts1 = null; + ArrayList ITokenTags1 = null; + ArrayList strTokenStems1 = null; + Integer Anotatorcounter1 = null; + Integer TokensCounter1 = null; + ArrayList entityTokenTags1 = null; + ArrayList nerEntities1 = null; + ArrayList nerEntitiesType1 = null; + ArrayList stopWordToken1 = null; + ArrayList stopWordLemma1 = null; + Integer PairCounter1 = null; + List coreMaps2 = jmweAnnotation.get(CoreAnnotations.SentencesAnnotation.class); + + + + Annotation strAnno = new Annotation(sent1); + strAnno.compact(); + stanfordCoreNLP.annotate(strAnno); + + Annotation strAnno2 = new Annotation(sent2); + strAnno2.compact(); + 
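// annotate the second sentence with the same core pipeline as the first +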
+        stanfordCoreNLP.annotate(strAnno2);
+
+        Annotation strAnnoSentiment = new Annotation(sent1);
+        strAnnoSentiment.compact();
+        stanfordCoreNLPSentiment.annotate(strAnnoSentiment);
+
+        Annotation strAnnoSentiment2 = new Annotation(sent2);
+        strAnnoSentiment2.compact();
+        stanfordCoreNLPSentiment.annotate(strAnnoSentiment2);
+
+        Annotation annotation = new Annotation(sent1);
+        stanfordCoreNLP.annotate(annotation);
+        CoreDocument coreDocument = new CoreDocument(annotation);
+
+        annotation = new Annotation(sent2);
+        stanfordCoreNLP.annotate(annotation);
+        CoreDocument coreDocument1 = new CoreDocument(annotation);
+
+        Integer tokenizeCounting = null;
+
+        SentimentAnalyzerTestDynamicTesting sentimentAnalyzerTest = new SentimentAnalyzerTestDynamicTesting(sent1, sent2,
+                coreMaps1, coreMaps2, strAnno,
+                strAnno2, strAnnoSentiment,
+                strAnnoSentiment2, coreDocument,
+                coreDocument1,
+                tagger, gsf, classifier, tokenizeCounting, tokenizeCountingF,
+                taggedWordListF, taggedWordList1, retrieveTGWListF, retrieveTGWList1,
+                sentencesF, sentence1, sentencesSentimentF, sentenceSentiment1, treesF, trees1,
+                grammaticalStructuresF, grammaticalStructures1, typedDependenciesF,
+                typedDependencies1, rnnCoreAnnotationsPredictedF, rnnCoreAnnotationsPredicted1,
+                simpleMatricesF, simpleMatrices1, simpleMatricesNodevectorsF, simpleMatricesNodevectors1,
+                listF, list1, longestF, longest1, sentimentLongestF, sentimentLongest1, imwesF,
+                imwes1, InflectedCounterNegativeF, InflectedCounterNegative1, InflectedCounterPositiveF,
+                InflectedCounterPositive1, tokenEntryF, tokenEntry1, MarkedContinuousCounterF,
+                MarkedContinuousCounter1, UnmarkedPatternCounterF, UnmarkedPatternCounter1,
+                strTokensIpartFormF, strTokensIpartForm1, tokenFormsF, tokenForms1,
+                strTokenEntryGetPOSF, strTokenEntryGetPOS1, intTokenEntyCountsF,
+                intTokenEntyCounts1, ITokenTagsF, ITokenTags1, strTokenStemsF, strTokenStems1,
+                AnotatorcounterF, Anotatorcounter1, TokensCounterF, TokensCounter1,
+                entityTokenTagsF, entityTokenTags1, nerEntitiesF, nerEntities1, nerEntitiesTypeF,
+                nerEntitiesType1, stopWordTokenF, stopWordToken1, stopWordLemmaF, stopWordLemma1,
+                PairCounterF, PairCounter1, dataRandom, !testAll); // the last flag is the analyzer's per-function testing mode; testAll == true turns it off
+
+        if (testAll) {
+            return sentimentAnalyzerTest.callSMX();
+        }
+
+        // Run the remaining scoring functions one at a time: store their results in
+        // small per-function tables, get each function to pass on its own, then check
+        // whether the parameters also pass combined.
+        Double score = 0.0;
+        // there should be no null values at this point because validateStringCaches() has run.
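+        // The switch below scores exactly one analyzer function in isolation against
+        // the candidate parameter set. A new function would be wired in the same way;
+        // this is a sketch only, and its getter/method names are assumptions rather
+        // than part of this commit:
+        //     case "typeDependenciesGrammaticalRelation":
+        //         score = sentimentAnalyzerTest.typeDependenciesGrammaticalRelation(0.0,
+        //                 sentimentAnalyzerTest.getTypedDependenciesF(),
+        //                 sentimentAnalyzerTest.getTypedDependencies1());
+        //         break;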
+        switch (tableTestType) {
+            case "tokensCounterScoring":
+                score = sentimentAnalyzerTest.tokensCounterScoring(0.0, sentimentAnalyzerTest.getTokensCounterF(),
+                        sentimentAnalyzerTest.getTokensCounter1());
+                break;
+            case "stopWordTokenLemmaScoring":
+                score = sentimentAnalyzerTest.stopWordTokenLemmaScoring(0.0, sentimentAnalyzerTest.getStopWordTokenF(),
+                        sentimentAnalyzerTest.getStopWordToken1(), sentimentAnalyzerTest.getStopWordLemmaF(),
+                        sentimentAnalyzerTest.getStopWordLemma1());
+                break;
+            case "simpleRNNMaxtrixVectors":
+                score = sentimentAnalyzerTest.simpleRNNMaxtrixVectors(score, sentimentAnalyzerTest.getSimpleMatricesNodevectorsF(),
+                        sentimentAnalyzerTest.getSimpleMatricesNodevectors1());
+                break;
+        }
+
+        return score;
+    }
+
+    //@RetryingTest(500)
+    @Test
+    public void testScoring() {
+        Set<JSONObject> randomDataSet = null;
+
+        // testCaseFunction is only used when testing a specific function; it is
+        // ignored when testAll is true.
+        //String testCaseFunction = "tokensCounterScoring";
+        //String testCaseFunction = "stopWordTokenLemmaScoring";
+        String testCaseFunction = "simpleRNNMaxtrixVectors";
+        boolean testAll = false; // set to true to run the full test-case list instead of a single function
+        if (!testAll) {
+            Set<JSONObject> jsonObjects = DataMapper.pickHighestProgression(testCaseFunction);
+            if (jsonObjects.isEmpty()) {
+                // start from the bottom with random data.
+                LoadDataFromJson();
+                randomDataSet = ModifyDataRandomly(testCaseFunction);
+            }
+            else {
+                // continue from the last highest iteration.
+                randomDataSet = miscData(jsonObjects, testCaseFunction);
+            }
+            DataMapper.WriteDataToSpecificFunction(randomDataSet, testCaseFunction);
+            randomDataSet = DataMapper.SelectRandomDataNotProcessed(testCaseFunction);
+        }
+        else {
+            // select one row from each table where passed_all_test_cases is set;
+            // comment out @RetryingTest when running this branch.
+        }
+
+        Datahandler datahandler = new Datahandler();
+        PipelineJMWESingleton.getINSTANCE();
+        StanfordCoreNLP stanfordCoreNLP = datahandler.pipeLineSetUp();
+        StanfordCoreNLP stanfordCoreNLPSentiment = datahandler.shiftReduceParserInitiate();
+
+        // the full test-case list and the subset for the selected function
+        List<testClasses> testClassesListAll = DataMapper.GetAllTestsCases();
+        List<testClasses> testClassesSpecificFunction = DataMapper.GetFunctionTestCases();
+
+        boolean passedTests = false;
+        int counter = 0;
+
+        int max_index_counter_tests_passed = DataMapper.get_index_counter_tests_passed(testCaseFunction, 0);
+
+        String comparator_for_score_for_failing_testcase = DataMapper.getMaxIndexComparator(max_index_counter_tests_passed);
+        Double bestScore = DataMapper.getbestScoreFormax_index_counter_tests_passed(max_index_counter_tests_passed, testCaseFunction, comparator_for_score_for_failing_testcase);
+
+        for (JSONObject dataRandom : randomDataSet) {
+            if (counter % 100 == 0)
+            {
+                System.out.println("counter: " + counter + "/" + randomDataSet.size());
+            }
+            if (!testAll)
+            {
+                // here we run the tests for a single function.
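+                // each dataRandom is one candidate parameter set; the first set that
+                // clears every test case for this function is marked successful in
+                // the database and the search stops.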
+                if (passedAllTests(dataRandom, testClassesSpecificFunction, stanfordCoreNLP, stanfordCoreNLPSentiment, testCaseFunction, testAll,
+                        max_index_counter_tests_passed)) {
+                    DataMapper.MarkSuccessfullFunctionData(testCaseFunction, (Integer) dataRandom.get("rowid"));
+                    System.out.println("passed all.");
+                    passedTests = true;
+                    break;
+                }
+            }
+            else {
+                // check whether the parameter set can also pass all original test cases afterwards.
+                if (passedAllTests(dataRandom, testClassesListAll, stanfordCoreNLP, stanfordCoreNLPSentiment, testCaseFunction, testAll,
+                        max_index_counter_tests_passed)) {
+                    passedTests = true;
+                }
+            }
+            counter++;
+        }
+        if (!testAll) {
+            DataMapper.UpdateProcessed(testCaseFunction, randomDataSet, max_index_counter_tests_passed, bestScore, comparator_for_score_for_failing_testcase);
+            DataMapper.UpdateModifiedKeyForRowID(randomDataSet, testCaseFunction);
+        }
+        if (!passedTests) {
+            Assert.fail("no parameter set passed all test cases");
+        }
+    }
+
+    private boolean passedAllTests(JSONObject dataRandom, List<testClasses> testClassesList, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment,
+                                   String tableTestType, boolean testAll, int max_index_counter_tests_passed) {
+        int indexCounter = 0;
+        dataRandom.put("index_counter_tests_passed", indexCounter); // records how far this run got.
+        for (testClasses testClass : testClassesList)
+        {
+            Double performTesting = testCall(testClass, stanfordCoreNLP, stanfordCoreNLPSentiment, dataRandom, tableTestType, testAll, indexCounter);
+            if (performTesting == null || performTesting.isNaN())
+            {
+                dataRandom.put("index_counter_tests_passed", 0); // reset to 0 to mark the run as unusable.
+                return false;
+            }
+            // update the score when we hit the failing test case or are about to surpass it.
+            if (indexCounter >= max_index_counter_tests_passed)
+            {
+                dataRandom.put("failed_testcase_score", performTesting);
+            }
+            testClassesList.get(indexCounter).setResultScore(performTesting);
+            // performTestingFittingLess only applies to the full test run, not to individual functions.
+            if (testClass.isPerformTestingFittingLess())
+            {
+                testClasses testClassPerformFittingBetter = testClassesList.get(indexCounter - 1);
+                if (testClass.getComparator().equals("<")) {
+                    // passing this condition requires the current score to be worse than the previous one,
+                    // because only one of the sentences was replaced and it is meant to be
+                    // inferior to the sentence it replaced.
+                    if (performTesting.intValue() > testClassPerformFittingBetter.getResultScore()) {
+                        return false;
+                    }
+                }
+                else if (testClass.getComparator().equals(">")) {
+                    // passing this condition requires the current score to be better than the previous one.
+                    if (performTesting.intValue() < testClassPerformFittingBetter.getResultScore()) {
+                        return false;
+                    }
+                }
+            }
+            else if (testClass.getComparator().equals("<"))
+            {
+                // e.g. an expected score of 1 fails if performTesting comes back as 500.
+                if (performTesting.intValue() > testClass.getScore())
+                {
+                    return false;
+                }
+            }
+            else if (testClass.getComparator().equals(">"))
+            {
+                // e.g. an expected score of -1 fails if performTesting comes back as -500.
+                if (performTesting.intValue() < testClass.getScore())
+                {
+                    return false;
+                }
+            }
+            // some cases are "no operation": their result is only needed for comparison with the next result.
+            // this only happens on the full test run, not when testing functions.
+            indexCounter++;
+            dataRandom.put("index_counter_tests_passed", indexCounter); // records how far this run got.
+        }
+        return true;
+    }
+
+    private void LoadDataFromJson() {
+        // primary path is the development machine; the fallback is the bot host under the current user's home.
+        try {
+            dataFromJson = (JSONObject) parser.parse(
+                    new FileReader("/mnt/hdd/home/christian/content/sourcemod_plugins_and_extensions/addons/sourcemod/scripting/gogs/ArtificialAutism/dynamicScore.json"));
+        } catch (IOException e) {
+            try {
+                String whoami = System.getProperty("user.name");
+                dataFromJson = (JSONObject) parser.parse(
+                        new FileReader("/home/" + whoami + "/autism_bot_number_crunching/dynamicScore.json"));
+            } catch (IOException | ParseException ex) {
+                throw new RuntimeException(ex);
+            }
+        } catch (ParseException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private Set<JSONObject> ModifyDataRandomly(String testCaseFunction) {
+        Set<JSONObject> randomDataSet = new HashSet<>();
+        for (int i = 0; i < 1; i++) // a single seed object; raise the bound to generate more.
+        {
+            JSONObject object = new JSONObject();
+            for (Object key : dataFromJson.keySet())
+            {
+                if (!key.toString().startsWith(testCaseFunction + "Double_param") &&
+                        !key.toString().startsWith(testCaseFunction + "_param")) continue;
+                Object intValue = dataFromJson.get(key);
+                if (key.toString().startsWith("simpleRNNMaxtrixVectorsDouble") || key.toString().startsWith("simpleRNNMatrixCalculationsDouble")
+                        || key.toString().startsWith("typeDependenciesGrammaticalRelationDouble") || key.toString().startsWith("iterateTreesDouble")) {
+                    double doubleVal = (double) intValue;
+                    double randomUpperBound = doubleVal * (new Random().nextInt(10) + 0.1);
+                    double randomVal = randomUpperBound * new Random().nextDouble();
+                    object.put(key, randomVal + 0.1);
+                } else {
+                    // json-simple parses whole numbers as Long, so convert down to int explicitly.
+                    intValue = ((Long) intValue).intValue();
+                    // assumes the seed value is non-negative so the random bound stays positive.
+                    int randomUpperBound = ((int) intValue + 1) * (new Random().nextInt(10) + 1);
+                    int randomLowerBound = (int) intValue / 2;
+                    object.put(key, new Random().nextInt(randomUpperBound - randomLowerBound) + randomLowerBound);
+                }
+            }
+            randomDataSet.add(object);
+        }
+
+        return miscData(randomDataSet, testCaseFunction);
+    }
+
+    private JSONObject getCloneRandomShuffled(List<Object> keys, JSONObject clone)
+    {
+        int keyIndex = 0;
+        // caps how many of the shuffled keys may be mutated per clone (assumes at least six keys).
+        int splitAmount = new Random().nextInt(2, clone.keySet().size() / 2);
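+        // Mutation strategy: walk the shuffled keys and, per key, either scale the
+        // value down (1/3 chance), scale it up (1/3 chance), or keep its magnitude
+        // and only normalize the formatting; stop once splitAmount keys have been
+        // visited, so each clone differs from its parent in a random subset of parameters.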
+        for (Object key : keys) {
+            int negativeOrPositive = new Random().nextInt(3);
+            if (key.toString().startsWith("simpleRNNMaxtrixVectorsDouble") || key.toString().startsWith("simpleRNNMatrixCalculationsDouble")
+                    || key.toString().startsWith("typeDependenciesGrammaticalRelationDouble") || key.toString().startsWith("iterateTreesDouble")) {
+                Double doubleValue1 = (Double) clone.get(key);
+                DecimalFormat newFormat = new DecimalFormat("#.###");
+                newFormat.setDecimalFormatSymbols(DecimalFormatSymbols.getInstance(Locale.ENGLISH));
+                if (negativeOrPositive == 0) { // skip updates that would push the value below -1,000,000.
+                    double newValue1 = doubleValue1 - (doubleValue1 * new Random().nextDouble(0.0, 25));
+                    if (newValue1 > -1000000) {
+                        clone.put(key, Double.valueOf(newFormat.format(newValue1)));
+                    }
+                } else if (negativeOrPositive == 1) { // skip updates that would push the value above 1,000,000.
+                    double newValue1 = doubleValue1 + (doubleValue1 * new Random().nextDouble(0.0, 25));
+                    if (newValue1 < 1000000) {
+                        clone.put(key, Double.valueOf(newFormat.format(newValue1)));
+                    }
+                } else {
+                    // keep the magnitude; just truncate to three decimal places.
+                    double newValue1 = Double.valueOf(newFormat.format(doubleValue1));
+                    clone.put(key, newValue1);
+                }
+            }
+            else {
+                int intValue1 = (int) clone.get(key);
+                if (negativeOrPositive == 0) { // skip updates that would push the value below -1,000,000.
+                    int i = intValue1 - (int) (intValue1 * new Random().nextDouble(0.0, 25));
+                    if (i > -1000000) {
+                        clone.put(key, i);
+                    }
+                } else if (negativeOrPositive == 1) { // skip updates that would push the value above 1,000,000.
+                    int i = intValue1 + (int) (intValue1 * new Random().nextDouble(0.0, 25));
+                    if (i < 1000000) {
+                        clone.put(key, i);
+                    }
+                } else {
+                    // leave the value unchanged.
+                    clone.put(key, intValue1);
+                }
+            }
+            keyIndex++;
+            if (keyIndex % splitAmount == 0) {
+                break;
+            }
+        }
+        return clone;
+    }
+
+    private Set<JSONObject> miscData(Set<JSONObject> randomDataSet, String testCaseFunction) {
+        Set<JSONObject> randomDataSetToReturn = new HashSet<>();
+
+        int max_index_counter_tests_passed = DataMapper.get_index_counter_tests_passed(testCaseFunction, 0);
+        boolean maxIndexCounterTestsPassedCount = DataMapper.getMaxIndex_counter_tests_passedCount(max_index_counter_tests_passed, testCaseFunction);
+
+        HashMap<Integer, ArrayList<Integer>> arr = DataMapper.get_parameter_generations(randomDataSet, testCaseFunction);
+
+        for (int i = 0; i < 2; i++) {
+            for (JSONObject data : randomDataSet) {
+                List<Object> keys = new ArrayList<>(data.keySet());
+                Collections.shuffle(keys);
+                // fewer than 1000 rows for this index_counter_tests_passed: just generate random data.
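+                // otherwise: derive one +10% and one -10% neighbour of the row via
+                // compareDataTo, which skips parameters that earlier generations of
+                // this row already modified.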
+                if (maxIndexCounterTestsPassedCount) {
+                    randomDataSetToReturn.add(getCloneRandomShuffled(keys, (JSONObject) data.clone()));
+                } else {
+                    JSONObject plussedClone = compareDataTo(keys, (JSONObject) data.clone(), arr.get(data.get("rowid")), testCaseFunction, true);
+                    JSONObject minussedClone = compareDataTo(keys, (JSONObject) data.clone(), arr.get(data.get("rowid")), testCaseFunction, false);
+                    if (plussedClone != null) {
+                        randomDataSetToReturn.add(plussedClone);
+                        randomDataSetToReturn.add(minussedClone);
+                    }
+                }
+            }
+        }
+        System.out.println("randomDataSetToReturn size: " + randomDataSetToReturn.size());
+        return randomDataSetToReturn;
+    }
+
+    private JSONObject compareDataTo(List<Object> keys, JSONObject clone, ArrayList<Integer> arr, String testCaseFunction, boolean isPlussed) {
+        int keyIndex = 0;
+        int splitAmount = 1;
+
+        boolean modifiedKey = false;
+        for (Object key : keys) {
+            boolean alreadyDone = false;
+            for (int i : arr)
+            {
+                // parameter numbers in arr were already modified in an earlier generation of this row.
+                if (key.toString().endsWith("param" + i))
+                {
+                    alreadyDone = true;
+                    break;
+                }
+            }
+            if (alreadyDone)
+            {
+                continue;
+            }
+            modifiedKey = true;
+
+            if (key.toString().startsWith("simpleRNNMaxtrixVectorsDouble") || key.toString().startsWith("simpleRNNMatrixCalculationsDouble")
+                    || key.toString().startsWith("typeDependenciesGrammaticalRelationDouble") || key.toString().startsWith("iterateTreesDouble")) {
+                Double doubleValue1 = (Double) clone.get(key);
+                DecimalFormat newFormat = new DecimalFormat("#.###");
+                newFormat.setDecimalFormatSymbols(DecimalFormatSymbols.getInstance(Locale.ENGLISH));
+                Double v = Double.valueOf(newFormat.format(doubleValue1));
+                if (isPlussed) {
+                    clone.put(key, v + (v * 0.10));
+                }
+                else {
+                    clone.put(key, v - (v * 0.10));
+                }
+            }
+            else {
+                int intValue1 = (int) clone.get(key);
+                if (isPlussed) {
+                    clone.put(key, (int) (intValue1 + (intValue1 * 0.10)));
+                }
+                else {
+                    clone.put(key, (int) (intValue1 - (intValue1 * 0.10)));
+                }
+            }
+            clone.put("modified_key", key);
+            keyIndex++; // TODO: splitAmount is fixed at 1, so only one key is modified per clone; keyIndex/splitAmount could be dropped.
+            if (keyIndex % splitAmount == 0) {
+                break;
+            }
+        }
+        if (!modifiedKey)
+        {
+            // every parameter of this row has been explored; drop the row.
+            DataMapper.deleteRow((int) clone.get("rowid"), testCaseFunction);
+            return null;
+        }
+        return clone;
+    }
+}