first commit
commit cc6aed28f9
8
ArtificialAutism.iml
Normal file
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<module version="4">
  <component name="CheckStyle-IDEA-Module">
    <option name="configuration">
      <map />
    </option>
  </component>
</module>
16
NOTES.txt
Normal file
@@ -0,0 +1,16 @@
To run this you need to include the Stanford parser:
https://nlp.stanford.edu/software/lex-parser.shtml#Download

If you want to run it on a remote machine with the current POM setup, you need to include all jars, which
also means the Stanford parser jars, totalling around 620 MB. That jar is currently not included in the uploaded /libs folder
because it is too large; download it from the link above and put it in the /libs folder of the remote machine to run it there.

Now also requires: https://nlp.stanford.edu/software/stanford-srparser-2014-10-23-models.jar
(also very large)

Now requires at least about 4 GB of RAM to run.
Also requires nlp.stanford.edu/software/stanford-corenlp-full-2018-10-05.zip,
because it needs lexicalized parsers, which the shift-reduce parser does not possess.
The regular Stanford parser is therefore required in addition to the shift-reduce parser.

Jars too large for the /libs folder: CorenlpModels-1.0, srParser-1, ws4j-1.0.1, StanfordParserModel-1.0
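A minimal smoke-test sketch for the setup NOTES.txt describes — assuming the CoreNLP, stanford-srparser models, and lexparser jars from the links above are on the classpath; the model resource paths are the standard ones shipped inside those jars, not something this repo defines:

import edu.stanford.nlp.parser.lexparser.LexicalizedParser;
import edu.stanford.nlp.parser.shiftreduce.ShiftReduceParser;

public class ParserSmokeTest {
    public static void main(String[] args) {
        // Lexicalized parser (needs stanford-corenlp-full-2018-10-05 / StanfordParserModel)
        LexicalizedParser lp =
                LexicalizedParser.loadModel("edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz");
        // Shift-reduce parser (needs stanford-srparser-2014-10-23-models.jar)
        ShiftReduceParser srp =
                ShiftReduceParser.loadModel("edu/stanford/nlp/models/srparser/englishSR.ser.gz");
        System.out.println(lp.parse("The bot parses this sentence.").pennString());
        System.out.println(srp.parse("The bot parses this sentence.").pennString());
    }
}

If both models load without missing-resource errors, the /libs setup is complete; remember the roughly 4 GB heap requirement noted above.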
13
app.config
Normal file
@@ -0,0 +1,13 @@
app.url=jdbc:mysql://localhost:3306/databasename?useSSL=false&useLegacyDatetimeCode=false&serverTimezone=UTC&allowPublicKeyRetrieval=True
app.username=
app.password=
app.hostip=
app.hostip2=
app.hostport=
app.hostport2=
app.discordtoken=
app.interval_days=4
app.string_count=14000
app.thread_count=4
app.interval_days_minus=4
app.random_length=2.5
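app.config is a standard java.util.Properties file; a minimal sketch of reading it (the AppConfig class name is hypothetical — only the keys and the key=value format come from the file above):

import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

public class AppConfig {
    public static void main(String[] args) throws IOException {
        Properties props = new Properties();
        try (FileInputStream in = new FileInputStream("app.config")) {
            props.load(in); // parses the key=value lines shown above
        }
        String jdbcUrl = props.getProperty("app.url");
        int threads = Integer.parseInt(props.getProperty("app.thread_count", "4"));
        System.out.println("connecting to " + jdbcUrl + " with " + threads + " worker threads");
    }
}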
464
create_table_exhaustive.sql
Normal file
@@ -0,0 +1,464 @@
create table ArtificialAutism.exhaustive_params(
rowid int primary key auto_increment,
stopwordTokenPairCounterScoring_param1 int,
stopwordTokenPairCounterScoring_param2 int,
stopwordTokenPairCounterScoring_param3 int,
stopwordTokenPairCounterScoring_param4 int,
stopwordTokenPairCounterScoring_param5 int,
stopwordTokenPairCounterScoring_param6 int,
stopwordTokenPairCounterScoring_param7 int,
stopwordTokenPairCounterScoring_param8 int,
stopwordTokenPairCounterScoring_param9 int,
stopwordTokenPairCounterScoring_param10 int,
stopwordTokenPairCounterScoring_param11 int,
stopwordTokenPairCounterScoring_param12 int,
stopwordTokenPairCounterScoring_param13 int,
stopwordTokenPairCounterScoring_param14 int,
stopwordTokenPairCounterScoring_param15 int,
stopwordTokenPairCounterScoring_param16 int,
stopwordTokenPairCounterScoring_param17 int,
stopwordTokenPairCounterScoring_param18 int,
stopwordTokenPairCounterScoring_param19 int,
stopwordTokenPairCounterScoring_param20 int,
stopwordTokenPairCounterScoring_param21 int,
stopwordTokenPairCounterScoring_param22 int,
stopwordTokenPairCounterScoring_param23 int,
stopWordTokenLemmaScoring_param1 int,
stopWordTokenLemmaScoring_param2 int,
stopWordTokenLemmaScoring_param3 int,
stopWordTokenLemmaScoring_param4 int,
stopWordTokenLemmaScoring_param5 int,
stopWordTokenLemmaScoring_param6 int,
stopWordTokenLemmaScoring_param7 int,
stopWordTokenLemmaScoring_param8 int,
stopWordTokenLemmaScoring_param9 int,
stopWordTokenLemmaScoring_param10 int,
stopWordTokenLemmaScoring_param11 int,
stopWordTokenLemmaScoring_param12 int,
stopWordTokenLemmaScoring_param13 int,
stopWordTokenLemmaScoring_param14 int,
stopWordTokenLemmaScoring_param15 int,
stopWordTokenLemmaScoring_param16 int,
stopWordTokenLemmaScoring_param17 int,
nerEntitiesAndTokenScoring_param1 int,
nerEntitiesAndTokenScoring_param2 int,
SentenceScoreDiff_param1 int,
tokensCounterScoring_param1 int,
tokensCounterScoring_param2 int,
tokensCounterScoring_param3 int,
tokensCounterScoring_param4 int,
tokensCounterScoring_param5 int,
tokensCounterScoring_param6 int,
tokensCounterScoring_param7 int,
tokensCounterScoring_param8 int,
tokensCounterScoring_param9 int,
tokensCounterScoring_param10 int,
tokensCounterScoring_param11 int,
tokensCounterScoring_param12 int,
tokensCounterScoring_param13 int,
tokensCounterScoring_param14 int,
tokensCounterScoring_param15 int,
tokensCounterScoring_param16 int,
tokensCounterScoring_param17 int,
tokensCounterScoring_param18 int,
tokensCounterScoring_param19 int,
tokensCounterScoring_param20 int,
tokensCounterScoring_param21 int,
tokensCounterScoring_param22 int,
tokensCounterScoring_param23 int,
tokensCounterScoring_param24 int,
tokensCounterScoring_param25 int,
tokensCounterScoring_param26 int,
tokensCounterScoring_param27 int,
annotatorCountScoring_param1 int,
annotatorCountScoring_param2 int,
annotatorCountScoring_param3 int,
annotatorCountScoring_param4 int,
annotatorCountScoring_param5 int,
inflectedCounterScoring_param1 int,
inflectedCounterScoring_param2 int,
inflectedCounterScoring_param3 int,
inflectedCounterScoring_param4 int,
inflectedCounterScoring_param5 int,
inflectedCounterScoring_param6 int,
inflectedCounterScoring_param7 int,
inflectedCounterScoring_param8 int,
inflectedCounterScoring_param9 int,
inflectedCounterScoring_param10 int,
inflectedCounterScoring_param11 int,
tokenStemmingMapScoring_param1 int,
tokenformSizeScoring_param1 int,
tokenformSizeScoring_param2 int,
tokenformSizeScoring_param3 int,
tokenformSizeScoring_param4 int,
tokenformSizeScoring_param5 int,
tokenformSizeScoring_param6 int,
tokenformSizeScoring_param7 int,
tokenformSizeScoring_param8 int,
tokenformSizeScoring_param9 int,
strTokenMapTagsScoring_param1 int,
strTokenMapTagsScoring_param2 int,
strTokenMapTagsScoring_param3 int,
strTokenMapTagsScoring_param4 int,
strTokenMapTagsScoring_param5 int,
strTokenMapTagsScoring_param6 int,
strTokenEntryScoring_param1 int,
strTokenEntryScoring_param2 int,
strTokenEntryScoring_param3 int,
strTokenEntryScoring_param4 int,
strTokenEntryScoring_param5 int,
strTokenEntryScoring_param6 int,
strTokenEntryScoring_param7 int,
strTokenEntryScoring_param8 int,
strTokenEntryScoring_param9 int,
strTokenEntryScoring_param10 int,
strTokenEntryScoring_param11 int,
strTokensMapScoring_param1 int,
strTokensMapScoring_param2 int,
strTokensMapScoring_param3 int,
strTokensMapScoring_param4 int,
strTokensMapScoring_param5 int,
strTokensMapScoring_param6 int,
markedContiniousCounterScoring_param1 int,
markedContiniousCounterScoring_param2 int,
markedContiniousCounterScoring_param3 int,
markedContiniousCounterScoring_param4 int,
markedContiniousCounterScoring_param5 int,
markedContiniousCounterScoring_param6 int,
markedContiniousCounterScoring_param7 int,
markedContiniousCounterScoring_param8 int,
markedContiniousCounterScoring_param9 int,
markedContiniousCounterScoring_param10 int,
markedContiniousCounterScoring_param11 int,
unmarkedPatternCounterScoring_param1 int,
unmarkedPatternCounterScoring_param2 int,
unmarkedPatternCounterScoring_param3 int,
unmarkedPatternCounterScoring_param4 int,
unmarkedPatternCounterScoring_param5 int,
unmarkedPatternCounterScoring_param6 int,
unmarkedPatternCounterScoring_param7 int,
unmarkedPatternCounterScoring_param8 int,
tokenEntryPosScoring_param1 int,
tokenEntryPosScoring_param2 int,
tokenEntryPosScoring_param3 int,
tokenEntryPosScoring_param4 int,
entryCountsScoring_param1 int,
entryCountsScoring_param2 int,
entryCountsScoring_param3 int,
entryCountsScoring_param4 int,
entryCountsScoring_param5 int,
entryCountsScoring_param6 int,
entryCountsScoring_param7 int,
entryCountsScoring_param8 int,
entryCountsRelation_param1 int,
entryCountsRelation_param2 int,
entryCountsRelation_param3 int,
entryCountsRelation_param4 int,
entryCountsRelation_param5 int,
entryCountsRelation_param6 int,
entryCountsRelation_param7 int,
entryCountsRelation_param8 int,
entryCountsRelation_param9 int,
entryCountsRelation_param10 int,
entryCountsRelation_param11 int,
entryCountsRelation_param12 int,
entryCountsRelation_param13 int,
entryCountsRelation_param14 int,
entryCountsRelation_param15 int,
sentimentMatrixVariances_param1 int,
sentimentMatrixVariances_param2 int,
sentimentMatrixVariances_param3 int,
sentimentMatrixVariances_param4 int,
sentimentMatrixVariances_param5 int,
sentimentMatrixVariances_param6 int,
sentimentMatrixVariances_param7 int,
sentimentMatrixVariances_param8 int,
sentimentMatrixVariances_param9 int,
sentimentMatrixVariances_param10 int,
sentimentMatrixVariances_param11 int,
sentimentMatrixVariances_param12 int,
sentimentMatrixVariances_param13 int,
sentimentMatrixVariances_param14 int,
sentimentMatrixVariances_param15 int,
sentimentMatrixVariances_param16 int,
sentimentMatrixVariances_param17 int,
sentimentMatrixVariances_param18 int,
sentimentMatrixVariances_param19 int,
sentimentMatrixVariances_param20 int,
sentimentMatrixVariances_param21 int,
sentimentMatrixVariances_param22 int,
sentimentMatrixVariances_param23 int,
sentimentMatrixVariances_param24 int,
sentimentMatrixVariances_param25 int,
sentimentMatrixVariances_param26 int,
sentimentMatrixVariances_param27 int,
sentimentMatrixVariances_param28 int,
sentimentMatrixVariances_param29 int,
sentimentMatrixVariances_param30 int,
sentimentMatrixVariances_param31 int,
sentimentMatrixVariances_param32 int,
sentimentMatrixVariances_param33 int,
sentimentMatrixVariances_param34 int,
sentimentMatrixVariances_param35 int,
sentimentMatrixVariances_param36 int,
sentimentMatrixVariances_param37 int,
classifyRawEvaluation_param1 int,
classifyRawEvaluation_param2 int,
classifyRawEvaluation_param3 int,
sentiment1_param1 int,
simpleRNNMaxtrixVectors_param1 int,
simpleRNNMaxtrixVectors_param2 int,
simpleRNNMaxtrixVectors_param3 int,
simpleRNNMaxtrixVectors_param4 int,
simpleRNNMaxtrixVectors_param5 int,
simpleRNNMaxtrixVectorsDouble_param6 decimal(10,5),
simpleRNNMaxtrixVectorsDouble_param7 decimal(10,5),
simpleRNNMaxtrixVectors_param8 int,
simpleRNNMaxtrixVectorsDouble_param9 decimal(10,5),
simpleRNNMaxtrixVectorsDouble_param10 decimal(10,5),
simpleRNNMaxtrixVectors_param11 int,
simpleRNNMaxtrixVectorsDouble_param12 decimal(10,5),
simpleRNNMaxtrixVectorsDouble_param13 decimal(10,5),
simpleRNNMaxtrixVectors_param14 int,
simpleRNNMaxtrixVectors_param15 int,
simpleRNNMaxtrixVectors_param16 int,
simpleRNNMaxtrixVectors_param17 int,
simpleRNNMaxtrixVectors_param18 int,
simpleRNNMaxtrixVectors_param19 int,
simpleRNNMaxtrixVectors_param20 int,
simpleRNNMaxtrixVectors_param21 int,
simpleRNNMaxtrixVectors_param22 int,
simpleRNNMaxtrixVectors_param23 int,
simpleRNNMaxtrixVectors_param24 int,
simpleRNNMaxtrixVectors_param25 int,
simpleRNNMaxtrixVectors_param26 int,
simpleRNNMaxtrixVectors_param27 int,
simpleRNNMaxtrixVectors_param28 int,
simpleRNNMaxtrixVectors_param29 int,
simpleRNNMaxtrixVectors_param30 int,
simpleRNNMaxtrixVectors_param31 int,
simpleRNNMaxtrixVectors_param32 int,
simpleRNNMaxtrixVectors_param33 int,
simpleRNNMaxtrixVectorsDouble_param34 decimal(10,5),
simpleRNNMaxtrixVectorsDouble_param35 decimal(10,5),
simpleRNNMaxtrixVectors_param36 int,
simpleRNNMaxtrixVectorsDouble_param37 decimal(10,5),
simpleRNNMaxtrixVectorsDouble_param38 decimal(10,5),
simpleRNNMaxtrixVectors_param39 int,
simpleRNNMaxtrixVectors_param40 int,
simpleRNNMaxtrixVectors_param41 int,
simpleRNNMaxtrixVectors_param42 int,
simpleRNNMaxtrixVectors_param43 int,
simpleRNNMaxtrixVectors_param44 int,
simpleRNNMaxtrixVectors_param45 int,
simpleRNNMaxtrixVectors_param46 int,
simpleRNNMaxtrixVectors_param47 int,
simpleRNNMaxtrixVectors_param48 int,
simpleRNNMaxtrixVectors_param49 int,
simpleRNNMaxtrixVectorsDouble_param50 decimal(10,5),
simpleRNNMaxtrixVectors_param51 int,
simpleRNNMaxtrixVectorsDouble_param52 decimal(10,5),
simpleRNNMaxtrixVectors_param53 int,
simpleRNNMaxtrixVectorsDouble_param54 decimal(10,5),
simpleRNNMaxtrixVectors_param55 int,
simpleRNNMaxtrixVectorsDouble_param56 decimal(10,5),
simpleRNNMaxtrixVectorsDouble_param57 decimal(10,5),
simpleRNNMaxtrixVectors_param58 int,
simpleRNNMaxtrixVectorsDouble_param59 decimal(10,5),
simpleRNNMaxtrixVectorsDouble_param60 decimal(10,5),
simpleRNNMaxtrixVectors_param61 int,
simpleRNNMaxtrixVectorsDouble_param62 decimal(10,5),
simpleRNNMaxtrixVectorsDouble_param63 decimal(10,5),
simpleRNNMaxtrixVectors_param64 int,
simpleRNNMaxtrixVectorsDouble_param65 decimal(10,5),
simpleRNNMaxtrixVectorsDouble_param66 decimal(10,5),
simpleRNNMaxtrixVectors_param67 int,
simpleRNNMaxtrixVectors_param68 int,
simpleRNNMaxtrixVectors_param69 int,
simpleRNNMaxtrixVectorsDouble_param70 decimal(10,5),
simpleRNNMaxtrixVectorsDouble_param71 decimal(10,5),
simpleRNNMaxtrixVectors_param72 int,
simpleRNNMaxtrixVectorsDouble_param73 decimal(10,5),
simpleRNNMaxtrixVectorsDouble_param74 decimal(10,5),
simpleRNNMaxtrixVectors_param75 int,
simpleRNNMaxtrixVectors_param76 int,
simpleRNNMaxtrixVectorsDouble_param77 decimal(10,5),
simpleRNNMaxtrixVectorsDouble_param78 decimal(10,5),
simpleRNNMaxtrixVectors_param79 int,
simpleRNNMaxtrixVectors_param80 int,
simpleRNNMaxtrixVectors_param81 int,
simpleRNNMaxtrixVectors_param82 int,
simpleRNNMaxtrixVectors_param83 int,
simpleRNNMaxtrixVectors_param84 int,
simpleRNNMaxtrixVectors_param85 int,
simpleRNNMaxtrixVectors_param86 int,
simpleRNNMatrixCalculations_param1 int,
simpleRNNMatrixCalculations_param2 int,
simpleRNNMatrixCalculations_param3 int,
simpleRNNMatrixCalculations_param4 int,
simpleRNNMatrixCalculations_param5 int,
simpleRNNMatrixCalculations_param6 int,
simpleRNNMatrixCalculations_param7 int,
simpleRNNMatrixCalculations_param8 int,
simpleRNNMatrixCalculations_param9 int,
simpleRNNMatrixCalculations_param10 int,
simpleRNNMatrixCalculations_param11 int,
simpleRNNMatrixCalculations_param12 int,
simpleRNNMatrixCalculations_param13 int,
simpleRNNMatrixCalculations_param14 int,
simpleRNNMatrixCalculations_param15 int,
simpleRNNMatrixCalculations_param16 int,
simpleRNNMatrixCalculations_param17 int,
simpleRNNMatrixCalculationsDouble_param18 decimal(10,5),
simpleRNNMatrixCalculationsDouble_param19 decimal(10,5),
simpleRNNMatrixCalculationsDouble_param20 decimal(10,5),
simpleRNNMatrixCalculationsDouble_param21 decimal(10,5),
simpleRNNMatrixCalculationsDouble_param22 decimal(10,5),
simpleRNNMatrixCalculationsDouble_param23 decimal(10,5),
simpleRNNMatrixCalculations_param24 int,
simpleRNNMatrixCalculations_param25 int,
simpleRNNMatrixCalculations_param26 int,
simpleRNNMatrixCalculations_param27 int,
simpleRNNMatrixCalculationsDouble_param28 decimal(10,5),
simpleRNNMatrixCalculations_param29 int,
simpleRNNMatrixCalculationsDouble_param30 decimal(10,5),
simpleRNNMatrixCalculationsDouble_param31 decimal(10,5),
simpleRNNMatrixCalculationsDouble_param32 decimal(10,5),
simpleRNNMatrixCalculationsDouble_param33 decimal(10,5),
simpleRNNMatrixCalculationsDouble_param34 decimal(10,5),
simpleRNNMatrixCalculationsDouble_param35 decimal(10,5),
simpleRNNMatrixCalculationsDouble_param36 decimal(10,5),
simpleRNNMatrixCalculations_param37 int,
simpleRNNMatrixCalculations_param38 int,
simpleRNNMatrixCalculations_param39 int,
simpleRNNMatrixCalculations_param40 int,
simpleRNNMatrixCalculations_param41 int,
simpleRNNMatrixCalculations_param42 int,
simpleRNNMatrixCalculations_param43 int,
simpleRNNMatrixCalculations_param44 int,
simpleRNNMatrixCalculations_param45 int,
simpleRNNMatrixCalculations_param46 int,
simpleRNNMatrixCalculationsDouble_param47 decimal(10,5),
simpleRNNMatrixCalculations_param48 int,
simpleRNNMatrixCalculationsDouble_param49 decimal(10,5),
simpleRNNMatrixCalculations_param50 int,
simpleRNNMatrixCalculations_param51 int,
simpleRNNMatrixCalculationsDouble_param52 decimal(10,5),
simpleRNNMatrixCalculations_param53 int,
simpleRNNMatrixCalculations_param54 int,
simpleRNNMatrixCalculationsDouble_param55 decimal(10,5),
simpleRNNMatrixCalculationsDouble_param56 decimal(10,5),
simpleRNNMatrixCalculations_param57 int,
simpleRNNMatrixCalculations_param58 int,
simpleRNNMatrixCalculations_param59 int,
simpleRNNMatrixCalculations_param60 int,
simpleRNNMatrixCalculations_param61 int,
simpleRNNMatrixCalculations_param62 int,
simpleRNNMatrixCalculations_param63 int,
simpleRNNMatrixCalculations_param64 int,
simpleRNNMatrixCalculations_param65 int,
simpleRNNMatrixCalculations_param66 int,
simpleRNNMatrixCalculations_param67 int,
simpleRNNMatrixCalculations_param68 int,
simpleRNNMatrixCalculations_param69 int,
simpleRNNMatrixCalculations_param70 int,
simpleRNNMatrixCalculations_param71 int,
simpleRNNMatrixCalculations_param72 int,
simpleRNNMatrixCalculations_param73 int,
simpleRNNMatrixCalculations_param74 int,
typeDependenciesGrammaticalRelation_param1 int,
typeDependenciesGrammaticalRelation_param2 int,
typeDependenciesGrammaticalRelation_param3 int,
typeDependenciesGrammaticalRelation_param4 int,
typeDependenciesGrammaticalRelation_param5 int,
typeDependenciesGrammaticalRelation_param6 int,
typeDependenciesGrammaticalRelation_param7 int,
typeDependenciesGrammaticalRelation_param8 int,
typeDependenciesGrammaticalRelation_param9 int,
typeDependenciesGrammaticalRelation_param10 int,
typeDependenciesGrammaticalRelation_param11 int,
typeDependenciesGrammaticalRelation_param12 int,
typeDependenciesGrammaticalRelationDouble_param13 decimal(10,5),
typeDependenciesGrammaticalRelation_param14 int,
typeDependenciesGrammaticalRelation_param15 int,
typeDependenciesGrammaticalRelation_param16 int,
typeDependenciesGrammaticalRelation_param17 int,
typeDependenciesGrammaticalRelation_param18 int,
typeDependenciesGrammaticalRelation_param19 int,
typeDependenciesGrammaticalRelation_param20 int,
typeDependenciesGrammaticalRelation_param21 int,
typeDependenciesGrammaticalRelation_param22 int,
typeDependenciesGrammaticalRelation_param23 int,
typeDependenciesGrammaticalRelation_param24 int,
typeDependenciesGrammaticalRelation_param25 int,
typeDependenciesGrammaticalRelation_param26 int,
typeDependenciesGrammaticalRelation_param27 int,
typeDependenciesGrammaticalRelation_param28 int,
typeDependenciesGrammaticalRelation_param29 int,
typeDependenciesGrammaticalRelation_param30 int,
typeDependenciesGrammaticalRelation_param31 int,
typeDependenciesGrammaticalRelation_param32 int,
typeDependenciesGrammaticalRelation_param33 int,
typeDependenciesGrammaticalRelation_param34 int,
typeDependenciesGrammaticalRelation_param35 int,
typeDependenciesGrammaticalRelation_param36 int,
typeDependenciesGrammaticalRelation_param37 int,
typeDependenciesGrammaticalRelation_param38 int,
typeDependenciesGrammaticalRelation_param39 int,
typeDependenciesGrammaticalRelation_param40 int,
typeDependenciesGrammaticalRelation_param41 int,
typeDependenciesGrammaticalRelation_param42 int,
typeDependenciesGrammaticalRelation_param43 int,
typeDependenciesGrammaticalRelation_param44 int,
typeDependenciesGrammaticalRelation_param45 int,
typeDependenciesGrammaticalRelation_param46 int,
typeDependenciesGrammaticalRelation_param47 int,
typeDependenciesGrammaticalRelation_param48 int,
typeDependenciesGrammaticalRelation_param49 int,
typeDependenciesGrammaticalRelation_param50 int,
typeDependenciesGrammaticalRelation_param51 int,
typeDependenciesGrammaticalRelation_param52 int,
typeDependenciesGrammaticalRelation_param53 int,
typeDependenciesGrammaticalRelation_param54 int,
typeDependenciesGrammaticalRelation_param55 int,
typeDependenciesGrammaticalRelation_param56 int,
typeDependenciesGrammaticalRelation_param57 int,
typeDependenciesGrammaticalRelation_param58 int,
typeDependenciesGrammaticalRelation_param59 int,
iterateTrees_param1 int,
iterateTrees_param2 int,
iterateTrees_param3 int,
iterateTrees_param4 int,
iterateTrees_param5 int,
iterateTrees_param6 int,
iterateTrees_param7 int,
iterateTrees_param8 int,
iterateTrees_param9 int,
iterateTrees_param10 int,
iterateTreesDouble_param11 decimal(10,5),
iterateTrees_param12 int,
iterateTrees_param13 int,
iterateTrees_param14 int,
iterateTrees_param15 int,
iterateTrees_param16 int,
iterateTrees_param17 int,
iterateTrees_param18 int,
iterateTrees_param19 int,
iterateTreesDouble_param20 decimal(10,5),
iterateTrees_param21 int,
iterateTrees_param22 int,
iterateTrees_param23 int,
iterateTrees_param24 int,
iterateTrees_param25 int,
iterateTrees_param26 int,
iterateTrees_param27 int,
iterateTrees_param28 int,
iterateTrees_param29 int,
iterateTrees_param30 int,
iterateTrees_param31 int,
tgwListScoreIncrementer_param1 int,
overValue_param1 int,
created_on timestamp default now(),
colission_count int,
passed_all_test_cases tinyint(1) DEFAULT NULL
)
9
create_table_sentence_testing.sql
Normal file
@@ -0,0 +1,9 @@
CREATE TABLE `sentence_testing` (
  `String1` varchar(255) NOT NULL,
  `String2` varchar(255) NOT NULL,
  `score_required` int(11) DEFAULT 0,
  `comperator_for_score` varchar(255) NOT NULL,
  `PerformTestingFittingLess` tinyint(1) DEFAULT 0,
  `rowid` int(11) NOT NULL AUTO_INCREMENT,
  PRIMARY KEY (`rowid`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
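Each row pairs two chat strings with a score threshold; comperator_for_score says how the bot's computed similarity score must relate to score_required ('>', '<', or 'no operation'). A sketch of that check as inferred from the columns and the fixture data further below — the class and method names are hypothetical, and the treatment of 'no operation' as a no-op is an assumption:

public final class SentenceTestCase {
    // Inferred: a test row passes when the computed score relates to
    // score_required as specified by comperator_for_score.
    static boolean passes(double score, int scoreRequired, String comperatorForScore) {
        switch (comperatorForScore) {
            case ">":            return score > scoreRequired;
            case "<":            return score < scoreRequired;
            case "no operation": return true; // assumed: row carries no pass/fail check
            default: throw new IllegalArgumentException("unknown comperator: " + comperatorForScore);
        }
    }
}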
16
discord_autismbot.service
Normal file
@@ -0,0 +1,16 @@
[Unit]
Description=autismbot Discord

[Service]
User=nonroot
Group=nonroot
WorkingDirectory=/home/nonroot/autism_bot
Environment=PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/lib/jvm/jdk-17/bin
ExecStart=/usr/lib/jvm/jdk-17/bin/java -Xmx4900M -jar /home/nonroot/autism_bot/ArtificialAutism-1.0.jar
Restart=on-failure
RestartSec=10

#Restart=always

[Install]
WantedBy=default.target
485
dynamicScore.json
Normal file
@@ -0,0 +1,485 @@
{
"stopwordTokenPairCounterScoring_param1": 5,
"stopwordTokenPairCounterScoring_param2": 550,
"stopwordTokenPairCounterScoring_param3": 5,
"stopwordTokenPairCounterScoring_param4": 7500,
"stopwordTokenPairCounterScoring_param5": 6,
"stopwordTokenPairCounterScoring_param6": 550,
"stopwordTokenPairCounterScoring_param7": 3,
"stopwordTokenPairCounterScoring_param8": 6,
"stopwordTokenPairCounterScoring_param9": 3500,
"stopwordTokenPairCounterScoring_param10": 3,
"stopwordTokenPairCounterScoring_param11": 2513,
"stopwordTokenPairCounterScoring_param12": 0,
"stopwordTokenPairCounterScoring_param13": 1,
"stopwordTokenPairCounterScoring_param14": 2550,
"stopwordTokenPairCounterScoring_param15": 5,
"stopwordTokenPairCounterScoring_param16": 1500,
"stopwordTokenPairCounterScoring_param17": 5,
"stopwordTokenPairCounterScoring_param18": 450,
"stopwordTokenPairCounterScoring_param19": 2,
"stopwordTokenPairCounterScoring_param20": 4,
"stopwordTokenPairCounterScoring_param21": 2500,
"stopwordTokenPairCounterScoring_param22": 3,
"stopwordTokenPairCounterScoring_param23": 3200,

"stopWordTokenLemmaScoring_param1": 5,
"stopWordTokenLemmaScoring_param2": 9300,
"stopWordTokenLemmaScoring_param3": 3,
"stopWordTokenLemmaScoring_param4": 2,
"stopWordTokenLemmaScoring_param5": 7400,
"stopWordTokenLemmaScoring_param6": 1,
"stopWordTokenLemmaScoring_param7": 4100,
"stopWordTokenLemmaScoring_param8": 3,
"stopWordTokenLemmaScoring_param9": 540,
"stopWordTokenLemmaScoring_param10": 1,
"stopWordTokenLemmaScoring_param11": 0,
"stopWordTokenLemmaScoring_param12": 8500,
"stopWordTokenLemmaScoring_param13": 4,
"stopWordTokenLemmaScoring_param14": 8650,
"stopWordTokenLemmaScoring_param15": 2,
"stopWordTokenLemmaScoring_param16": 2500,
"stopWordTokenLemmaScoring_param17": 51,

"nerEntitiesAndTokenScoring_param1": 2500,
"nerEntitiesAndTokenScoring_param2": 2000,

"SentenceScoreDiff_param1": 15,

"tokensCounterScoring_param1": 1,
"tokensCounterScoring_param2": 5,
"tokensCounterScoring_param3": 2,
"tokensCounterScoring_param4": 10,
"tokensCounterScoring_param5": 500,
"tokensCounterScoring_param6": 500,
"tokensCounterScoring_param7": 500,
"tokensCounterScoring_param8": 5,
"tokensCounterScoring_param9": 0,
"tokensCounterScoring_param10": 500,
"tokensCounterScoring_param11": 0,
"tokensCounterScoring_param12": 500,
"tokensCounterScoring_param13": 2000,
"tokensCounterScoring_param14": 35,
"tokensCounterScoring_param15": 34,
"tokensCounterScoring_param16": 0,
"tokensCounterScoring_param17": 1,
"tokensCounterScoring_param18": 500,
"tokensCounterScoring_param19": 3012,
"tokensCounterScoring_param20": 0,
"tokensCounterScoring_param21": 1500,
"tokensCounterScoring_param22": 0,
"tokensCounterScoring_param23": 2,
"tokensCounterScoring_param24": 2,
"tokensCounterScoring_param25": 0,
"tokensCounterScoring_param26": 3065,
"tokensCounterScoring_param27": 5,

"annotatorCountScoring_param1": 1,
"annotatorCountScoring_param2": 2,
"annotatorCountScoring_param3": 700,
"annotatorCountScoring_param4": 5,
"annotatorCountScoring_param5": 400,

"inflectedCounterScoring_param1": 0,
"inflectedCounterScoring_param2": 650,
"inflectedCounterScoring_param3": 0,
"inflectedCounterScoring_param4": 2,
"inflectedCounterScoring_param5": 5,
"inflectedCounterScoring_param6": 150,
"inflectedCounterScoring_param7": 10,
"inflectedCounterScoring_param8": 5,
"inflectedCounterScoring_param9": 400,
"inflectedCounterScoring_param10": 15,
"inflectedCounterScoring_param11": 4000,

"tokenStemmingMapScoring_param1": 500,

"tokenformSizeScoring_param1": 0,
"tokenformSizeScoring_param2": 5,
"tokenformSizeScoring_param3": 1600,
"tokenformSizeScoring_param4": 500,
"tokenformSizeScoring_param5": 0,
"tokenformSizeScoring_param6": 2,
"tokenformSizeScoring_param7": 600,
"tokenformSizeScoring_param8": 4,
"tokenformSizeScoring_param9": 600,

"strTokenMapTagsScoring_param1": 1450,
"strTokenMapTagsScoring_param2": 0,
"strTokenMapTagsScoring_param3": 2,
"strTokenMapTagsScoring_param4": 700,
"strTokenMapTagsScoring_param5": 600,
"strTokenMapTagsScoring_param6": 700,

"strTokenEntryScoring_param1": 2,
"strTokenEntryScoring_param2": 2500,
"strTokenEntryScoring_param3": 2,
"strTokenEntryScoring_param4": 450,
"strTokenEntryScoring_param5": 1,
"strTokenEntryScoring_param6": 9450,
"strTokenEntryScoring_param7": 1,
"strTokenEntryScoring_param8": 4080,
"strTokenEntryScoring_param9": 2,
"strTokenEntryScoring_param10": 4560,
"strTokenEntryScoring_param11": 250,

"strTokensMapScoring_param1": 3880,
"strTokensMapScoring_param2": 2,
"strTokensMapScoring_param3": 700,
"strTokensMapScoring_param4": 600,
"strTokensMapScoring_param5": 0,
"strTokensMapScoring_param6": 700,

"markedContiniousCounterScoring_param1": 0,
"markedContiniousCounterScoring_param2": 50,
"markedContiniousCounterScoring_param3": 120,
"markedContiniousCounterScoring_param4": 2,
"markedContiniousCounterScoring_param5": 4500,
"markedContiniousCounterScoring_param6": 0,
"markedContiniousCounterScoring_param7": 500,
"markedContiniousCounterScoring_param8": 0,
"markedContiniousCounterScoring_param9": 10,
"markedContiniousCounterScoring_param10": 5,
"markedContiniousCounterScoring_param11": 400,

"unmarkedPatternCounterScoring_param1": 0,
"unmarkedPatternCounterScoring_param2": 100,
"unmarkedPatternCounterScoring_param3": 2,
"unmarkedPatternCounterScoring_param4": 2500,
"unmarkedPatternCounterScoring_param5": 5,
"unmarkedPatternCounterScoring_param6": 4000,
"unmarkedPatternCounterScoring_param7": 500,
"unmarkedPatternCounterScoring_param8": 3,

"tokenEntryPosScoring_param1": 1,
"tokenEntryPosScoring_param2": 500,
"tokenEntryPosScoring_param3": 3,
"tokenEntryPosScoring_param4": 700,

"entryCountsScoring_param1": 20,
"entryCountsScoring_param2": 10,
"entryCountsScoring_param3": 30,
"entryCountsScoring_param4": 46800,
"entryCountsScoring_param5": 0,
"entryCountsScoring_param6": 250,
"entryCountsScoring_param7": 3,
"entryCountsScoring_param8": 930,

"entryCountsRelation_param1": 20,
"entryCountsRelation_param2": 0,
"entryCountsRelation_param3": 5900,
"entryCountsRelation_param4": 0,
"entryCountsRelation_param5": 5,
"entryCountsRelation_param6": 450,
"entryCountsRelation_param7": 50,
"entryCountsRelation_param8": 180,
"entryCountsRelation_param9": 2,
"entryCountsRelation_param10": 450,
"entryCountsRelation_param11": 3,
"entryCountsRelation_param12": 550,
"entryCountsRelation_param13": 10,
"entryCountsRelation_param14": 2,
"entryCountsRelation_param15": 600,

"sentimentMatrixVariances_param1": 12,
"sentimentMatrixVariances_param2": 16,
"sentimentMatrixVariances_param3": 10000,
"sentimentMatrixVariances_param4": 44,
"sentimentMatrixVariances_param5": 48,
"sentimentMatrixVariances_param6": 7900,
"sentimentMatrixVariances_param7": 5,
"sentimentMatrixVariances_param8": 5500,
"sentimentMatrixVariances_param9": 2,
"sentimentMatrixVariances_param10": 1,
"sentimentMatrixVariances_param11": 45,
"sentimentMatrixVariances_param12": 20,
"sentimentMatrixVariances_param13": 20,
"sentimentMatrixVariances_param14": 45,
"sentimentMatrixVariances_param15": 20,
"sentimentMatrixVariances_param16": 2,
"sentimentMatrixVariances_param17": 20,
"sentimentMatrixVariances_param18": 2,
"sentimentMatrixVariances_param19": 5,
"sentimentMatrixVariances_param20": 20,
"sentimentMatrixVariances_param21": 50,
"sentimentMatrixVariances_param22": 5,
"sentimentMatrixVariances_param23": 24,
"sentimentMatrixVariances_param24": 20,
"sentimentMatrixVariances_param25": 4500,
"sentimentMatrixVariances_param26": 5,
"sentimentMatrixVariances_param27": 19,
"sentimentMatrixVariances_param28": 24,
"sentimentMatrixVariances_param29": 4500,
"sentimentMatrixVariances_param30": 26,
"sentimentMatrixVariances_param31": 35,
"sentimentMatrixVariances_param32": 3900,
"sentimentMatrixVariances_param33": 1,
"sentimentMatrixVariances_param34": 15,
"sentimentMatrixVariances_param35": 25,
"sentimentMatrixVariances_param36": 4900,
"sentimentMatrixVariances_param37": 1900,

"classifyRawEvaluation_param1": 990,
"classifyRawEvaluation_param2": 2,
"classifyRawEvaluation_param3": 5500,

"sentiment1_param1": 500,

"simpleRNNMaxtrixVectors_param1": 10,
"simpleRNNMaxtrixVectors_param2": 500,
"simpleRNNMaxtrixVectors_param3": 500,
"simpleRNNMaxtrixVectors_param4": 100,
"simpleRNNMaxtrixVectors_param5": 100,
"simpleRNNMaxtrixVectorsDouble_param6": 0.1,
"simpleRNNMaxtrixVectorsDouble_param7": 0.050,
"simpleRNNMaxtrixVectors_param8": 250,
"simpleRNNMaxtrixVectorsDouble_param9": 0.44,
"simpleRNNMaxtrixVectorsDouble_param10": 0.60,
"simpleRNNMaxtrixVectors_param11": 2400,
"simpleRNNMaxtrixVectorsDouble_param12": 0.40,
"simpleRNNMaxtrixVectorsDouble_param13": 0.445,
"simpleRNNMaxtrixVectors_param14": 3600,
"simpleRNNMaxtrixVectors_param15": 50,
"simpleRNNMaxtrixVectors_param16": 48,
"simpleRNNMaxtrixVectors_param17": 50,
"simpleRNNMaxtrixVectors_param18": 6900,
"simpleRNNMaxtrixVectors_param19": 30,
"simpleRNNMaxtrixVectors_param20": 135,
"simpleRNNMaxtrixVectors_param21": 100,
"simpleRNNMaxtrixVectors_param22": 105,
"simpleRNNMaxtrixVectors_param23": 255,
"simpleRNNMaxtrixVectors_param24": 150,
"simpleRNNMaxtrixVectors_param25": 90,
"simpleRNNMaxtrixVectors_param26": 95,
"simpleRNNMaxtrixVectors_param27": 7700,
"simpleRNNMaxtrixVectors_param28": 80,
"simpleRNNMaxtrixVectors_param29": 85,
"simpleRNNMaxtrixVectors_param30": 4500,
"simpleRNNMaxtrixVectors_param31": 125,
"simpleRNNMaxtrixVectors_param32": 130,
"simpleRNNMaxtrixVectors_param33": 43,
"simpleRNNMaxtrixVectorsDouble_param34": 0.01,
"simpleRNNMaxtrixVectorsDouble_param35": 0.00,
"simpleRNNMaxtrixVectors_param36": 1300,
"simpleRNNMaxtrixVectorsDouble_param37": 0.1,
"simpleRNNMaxtrixVectorsDouble_param38": 1.0,
"simpleRNNMaxtrixVectors_param39": 7935,
"simpleRNNMaxtrixVectors_param40": 1,
"simpleRNNMaxtrixVectors_param41": 10,
"simpleRNNMaxtrixVectors_param42": 585,
"simpleRNNMaxtrixVectors_param43": 2500,
"simpleRNNMaxtrixVectors_param44": 50,
"simpleRNNMaxtrixVectors_param45": 51,
"simpleRNNMaxtrixVectors_param46": 57,
"simpleRNNMaxtrixVectors_param47": 22,
"simpleRNNMaxtrixVectors_param48": 24,
"simpleRNNMaxtrixVectors_param49": 9875,
"simpleRNNMaxtrixVectorsDouble_param50": 0.000,
"simpleRNNMaxtrixVectors_param51": 500,
"simpleRNNMaxtrixVectorsDouble_param52": 0.1,
"simpleRNNMaxtrixVectors_param53": 255,
"simpleRNNMaxtrixVectorsDouble_param54": 0.50,
"simpleRNNMaxtrixVectors_param55": 1200,
"simpleRNNMaxtrixVectorsDouble_param56": 0.050,
"simpleRNNMaxtrixVectorsDouble_param57": 0.10,
"simpleRNNMaxtrixVectors_param58": 350,
"simpleRNNMaxtrixVectorsDouble_param59": 3.0,
"simpleRNNMaxtrixVectorsDouble_param60": 9.25,
"simpleRNNMaxtrixVectors_param61": 1555,
"simpleRNNMaxtrixVectorsDouble_param62": 9.25,
"simpleRNNMaxtrixVectorsDouble_param63": 10.0,
"simpleRNNMaxtrixVectors_param64": 2935,
"simpleRNNMaxtrixVectorsDouble_param65": 1.0,
"simpleRNNMaxtrixVectorsDouble_param66": 3.0,
"simpleRNNMaxtrixVectors_param67": 585,
"simpleRNNMaxtrixVectors_param68": 3,
"simpleRNNMaxtrixVectors_param69": 6555,
"simpleRNNMaxtrixVectorsDouble_param70": 0.01,
"simpleRNNMaxtrixVectorsDouble_param71": 0.00,
"simpleRNNMaxtrixVectors_param72": 3300,
"simpleRNNMaxtrixVectorsDouble_param73": 0.1,
"simpleRNNMaxtrixVectorsDouble_param74": 0.2,
"simpleRNNMaxtrixVectors_param75": 6790,
"simpleRNNMaxtrixVectors_param76": 1025,
"simpleRNNMaxtrixVectorsDouble_param77": 0.050,
"simpleRNNMaxtrixVectorsDouble_param78": 0.10,
"simpleRNNMaxtrixVectors_param79": 750,
"simpleRNNMaxtrixVectors_param80": 1,
"simpleRNNMaxtrixVectors_param81": 10,
"simpleRNNMaxtrixVectors_param82": 380,
"simpleRNNMaxtrixVectors_param83": 3,
"simpleRNNMaxtrixVectors_param84": 7500,
"simpleRNNMaxtrixVectors_param85": 5,
"simpleRNNMaxtrixVectors_param86": 400,

"simpleRNNMatrixCalculations_param1": 100,
"simpleRNNMatrixCalculations_param2": 50,
"simpleRNNMatrixCalculations_param3": 100,
"simpleRNNMatrixCalculations_param4": 0,
"simpleRNNMatrixCalculations_param5": 25,
"simpleRNNMatrixCalculations_param6": 100,
"simpleRNNMatrixCalculations_param7": 25,
"simpleRNNMatrixCalculations_param8": 25,
"simpleRNNMatrixCalculations_param9": 25,
"simpleRNNMatrixCalculations_param10": 5,
"simpleRNNMatrixCalculations_param11": 0,
"simpleRNNMatrixCalculations_param12": 0,
"simpleRNNMatrixCalculations_param13": 10,
"simpleRNNMatrixCalculations_param14": 55,
"simpleRNNMatrixCalculations_param15": 82,
"simpleRNNMatrixCalculations_param16": 30,
"simpleRNNMatrixCalculations_param17": 60,
"simpleRNNMatrixCalculationsDouble_param18": 0.0,
"simpleRNNMatrixCalculationsDouble_param19": 1.3,
"simpleRNNMatrixCalculationsDouble_param20": 1.9,
"simpleRNNMatrixCalculationsDouble_param21": 1.99,
"simpleRNNMatrixCalculationsDouble_param22": 1.248,
"simpleRNNMatrixCalculationsDouble_param23": 1.238,
"simpleRNNMatrixCalculations_param24": 2500,
"simpleRNNMatrixCalculations_param25": 34,
"simpleRNNMatrixCalculations_param26": 28,
"simpleRNNMatrixCalculations_param27": 7948,
"simpleRNNMatrixCalculationsDouble_param28": 55.5,
"simpleRNNMatrixCalculations_param29": 7530,
"simpleRNNMatrixCalculationsDouble_param30": 0.0,
"simpleRNNMatrixCalculationsDouble_param31": 1.6,
"simpleRNNMatrixCalculationsDouble_param32": 1.95,
"simpleRNNMatrixCalculationsDouble_param33": 61.1,
"simpleRNNMatrixCalculationsDouble_param34": 61.9,
"simpleRNNMatrixCalculationsDouble_param35": 37.5,
"simpleRNNMatrixCalculationsDouble_param36": 38.2,
"simpleRNNMatrixCalculations_param37": 4500,
"simpleRNNMatrixCalculations_param38": 4500,
"simpleRNNMatrixCalculations_param39": 550,
"simpleRNNMatrixCalculations_param40": 25,
"simpleRNNMatrixCalculations_param41": 75,
"simpleRNNMatrixCalculations_param42": 250,
"simpleRNNMatrixCalculations_param43": 75,
"simpleRNNMatrixCalculations_param44": 25,
"simpleRNNMatrixCalculations_param45": 23,
"simpleRNNMatrixCalculations_param46": 3500,
"simpleRNNMatrixCalculationsDouble_param47": 75.9,
"simpleRNNMatrixCalculations_param48": 24,
"simpleRNNMatrixCalculationsDouble_param49": 24.9,
"simpleRNNMatrixCalculations_param50": 85,
"simpleRNNMatrixCalculations_param51": 4500,
"simpleRNNMatrixCalculationsDouble_param52": 43.5,
"simpleRNNMatrixCalculations_param53": 50,
"simpleRNNMatrixCalculations_param54": 55,
"simpleRNNMatrixCalculationsDouble_param55": 60.0,
"simpleRNNMatrixCalculationsDouble_param56": 66.5,
"simpleRNNMatrixCalculations_param57": 55,
"simpleRNNMatrixCalculations_param58": 75,
"simpleRNNMatrixCalculations_param59": 90,
"simpleRNNMatrixCalculations_param60": 50,
"simpleRNNMatrixCalculations_param61": 55,
"simpleRNNMatrixCalculations_param62": 2800,
"simpleRNNMatrixCalculations_param63": 25,
"simpleRNNMatrixCalculations_param64": 45,
"simpleRNNMatrixCalculations_param65": 100,
"simpleRNNMatrixCalculations_param66": 45,
"simpleRNNMatrixCalculations_param67": 25,
"simpleRNNMatrixCalculations_param68": 25,
"simpleRNNMatrixCalculations_param69": 1400,
"simpleRNNMatrixCalculations_param70": 0,
"simpleRNNMatrixCalculations_param71": 500,
"simpleRNNMatrixCalculations_param72": 150,
"simpleRNNMatrixCalculations_param73": 12,
"simpleRNNMatrixCalculations_param74": 2501,

"typeDependenciesGrammaticalRelation_param1": 701,
"typeDependenciesGrammaticalRelation_param2": 528,
"typeDependenciesGrammaticalRelation_param3": 700,
"typeDependenciesGrammaticalRelation_param4": 527,
"typeDependenciesGrammaticalRelation_param5": 0,
"typeDependenciesGrammaticalRelation_param6": 4,
"typeDependenciesGrammaticalRelation_param7": 3450,
"typeDependenciesGrammaticalRelation_param8": 5,
"typeDependenciesGrammaticalRelation_param9": 0,
"typeDependenciesGrammaticalRelation_param10": 5,
"typeDependenciesGrammaticalRelation_param11": 1,
"typeDependenciesGrammaticalRelation_param12": 160,
"typeDependenciesGrammaticalRelationDouble_param13": 2.5,
"typeDependenciesGrammaticalRelation_param14": 260,
"typeDependenciesGrammaticalRelation_param15": 5,
"typeDependenciesGrammaticalRelation_param16": 10,
"typeDependenciesGrammaticalRelation_param17": 260,
"typeDependenciesGrammaticalRelation_param18": 600,
"typeDependenciesGrammaticalRelation_param19": 5,
"typeDependenciesGrammaticalRelation_param20": 8,
"typeDependenciesGrammaticalRelation_param21": 10,
"typeDependenciesGrammaticalRelation_param22": 50,
"typeDependenciesGrammaticalRelation_param23": 23435,
"typeDependenciesGrammaticalRelation_param24": 180,
"typeDependenciesGrammaticalRelation_param25": 480,
"typeDependenciesGrammaticalRelation_param26": 10522,
"typeDependenciesGrammaticalRelation_param27": 35,
"typeDependenciesGrammaticalRelation_param28": 45,
"typeDependenciesGrammaticalRelation_param29": 4021,
"typeDependenciesGrammaticalRelation_param30": 6,
"typeDependenciesGrammaticalRelation_param31": 9340,
"typeDependenciesGrammaticalRelation_param32": 15,
"typeDependenciesGrammaticalRelation_param33": 10,
"typeDependenciesGrammaticalRelation_param34": 2,
"typeDependenciesGrammaticalRelation_param35": 80,
"typeDependenciesGrammaticalRelation_param36": 2502,
"typeDependenciesGrammaticalRelation_param37": 0,
"typeDependenciesGrammaticalRelation_param38": 4101,
"typeDependenciesGrammaticalRelation_param39": 5,
"typeDependenciesGrammaticalRelation_param40": 450,
"typeDependenciesGrammaticalRelation_param41": 450,
"typeDependenciesGrammaticalRelation_param42": 1,
"typeDependenciesGrammaticalRelation_param43": 3,
"typeDependenciesGrammaticalRelation_param44": 1500,
"typeDependenciesGrammaticalRelation_param45": 5,
"typeDependenciesGrammaticalRelation_param46": 500,
"typeDependenciesGrammaticalRelation_param47": 1,
"typeDependenciesGrammaticalRelation_param48": 3431,
"typeDependenciesGrammaticalRelation_param49": 0,
"typeDependenciesGrammaticalRelation_param50": 3,
"typeDependenciesGrammaticalRelation_param51": 4500,
"typeDependenciesGrammaticalRelation_param52": 5,
"typeDependenciesGrammaticalRelation_param53": 500,
"typeDependenciesGrammaticalRelation_param54": 6500,
"typeDependenciesGrammaticalRelation_param55": 250,
"typeDependenciesGrammaticalRelation_param56": 3,
"typeDependenciesGrammaticalRelation_param57": 160,
"typeDependenciesGrammaticalRelation_param58": 3,
"typeDependenciesGrammaticalRelation_param59": 160,

"iterateTrees_param1": 4,
"iterateTrees_param2": 5,
"iterateTrees_param3": 0,
"iterateTrees_param4": 200,
"iterateTrees_param5": 0,
"iterateTrees_param6": 4,
"iterateTrees_param7": 500,
"iterateTrees_param8": 2,
"iterateTrees_param9": 350,
"iterateTrees_param10": 2,
"iterateTreesDouble_param11": 1.5,
"iterateTrees_param12": 3,
"iterateTrees_param13": 8745,
"iterateTrees_param14": 5,
"iterateTrees_param15": 1550,
"iterateTrees_param16": 3,
"iterateTrees_param17": 3949,
"iterateTrees_param18": 550,
"iterateTrees_param19": 800,
"iterateTreesDouble_param20": 1.5,
"iterateTrees_param21": 350,
"iterateTrees_param22": 2826,
"iterateTrees_param23": 5,
"iterateTrees_param24": 1923,
"iterateTrees_param25": 2,
"iterateTrees_param26": 12985,
"iterateTrees_param27": 1,
"iterateTrees_param28": 2803,
"iterateTrees_param29": 553,
"iterateTrees_param30": 10,
"iterateTrees_param31": 400,

"tgwListScoreIncrementer_param1": 64,

"overValue_param1": 32
}
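dynamicScore.json is one flat object of numeric tuning parameters; a minimal loading sketch, assuming Gson is on the classpath (whether the bot actually uses Gson, and this map-based access, are assumptions — only the file format is given above):

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.io.FileReader;
import java.io.IOException;
import java.util.Map;

public class DynamicScoreLoader {
    public static void main(String[] args) throws IOException {
        try (FileReader reader = new FileReader("dynamicScore.json")) {
            // Every value in the file is numeric, so a flat String -> Double map is enough.
            Map<String, Double> params = new Gson().fromJson(
                    reader, new TypeToken<Map<String, Double>>() {}.getType());
            System.out.println(params.get("SentenceScoreDiff_param1")); // prints 15.0
        }
    }
}

The keys mirror the columns of exhaustive_params above (the Double-suffixed names map to the decimal(10,5) columns), so a tuning run could be written from this map straight into one table row.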
136
insert_table_sentence_testing.sql
Normal file
@@ -0,0 +1,136 @@

INSERT INTO ArtificialAutism.sentence_testing
(String1, String2, score_required, comperator_for_score, PerformTestingFittingLess)
VALUES
('I was thinking to small supplies to avoid waiting in the rain. This way, in case of trouble you go home and take in your supply instead of waiting 45 min',
'*NêkØ* Kroaat_West bG <3', 800, '<', false),
('u want head from me :wlenny:', 'no thanks but i know some ladyboys here that would', 2000, '>', false),
('we need a trim for kroaat\'s teamwin', 'no thanks but i know some ladyboys here that would', -1, '<', true),
('i am not a stalker', 'but we ban for bhop hack', -1, 'no operation', false),
('i am not a stalker', 'hey stalker', -1, '>', true),
('what do you think of humanzz', 'did we actually go inside rocket -_-', -1, 'no operation', false),
('what do you think of humanzz', 'crying for beeing tossed for fire', -1, '>', true),
('what do you think of humanzz', 'crying for beeing tossed for fire', 3000, '>', false),
('admin ! this map needs a Free Torchlight for all', '( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ?', -5000, '<', false),
('i said that because i indeed have more knowledge about medicines than the average joe', '( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> ) ( ? <:wlenny:514861023002624001> ?? ?', -1, 'no operation', false),
('i said that because i indeed have more knowledge about medicines than the average joe', 'Depends on the situation but i will mostly trust my doctor if he says this will help and i actually need it', -1, '>', true),
('tell me something', 'you learn fast yoshmi', -1, 'no operation', false),
('tell me something', 'when i see europeans dead i laugh', -1, '>', true),
('crobat im gonna nominate next event for you', 'why did we sploit . <:wlenny:514861023002624001> <:wlenny:514861023002624001> <:wlenny:514861023002624001>', -1, 'no operation', false),
('crobat im gonna nominate next event for you', 'lets go for mako', -1, '>', true),
('how are the calcluations going? any issue with the JMWE?', 'anyone know if upgrading damage increases the mines\' damage also', -1, 'no operation', false),
('how are the calcluations going? any issue with the JMWE?', 'i have to get back to work', -1, '>', true),
('sprichst du Deutsch?', 'like rpggift unknown !!! 130', -2500, '<', false),
('do you like memes?', 'we need to adapt to the team we have', 3400, '<', false),
('do you like memes?', 'i have to get back to work', 14400, '>', false),
('is that a cursed sentence?', 'just didnt want to have heal since i died', -200, '<', false),
('my name is ? ? ? ? G ? ? ? but this server doesn\'t read my special ? ? ? ? ? ? characters', 'dont say that sentence again', -5000, '<', false),
('my name is ? ? ? ? G ? ? ? but this server doesn\'t read my special ? ? ? ? ? ? characters', 'please dont tell me your gonna repeat that', -5000, '<', false),
('my name is ? ? ? ? G ? ? ? but this server doesn\'t read my special ? ? ? ? ? ? characters', 'na it was a good try', -5000, '<', false),
('my name is ? ? ? ? G ? ? ? but this server doesn\'t read my special ? ? ? ? ? ? characters', 'NATSU DIES IN THE END', -5000, '<', false),
('reeee', '??( ? :wlenny~1: ?? ? :wlenny~1: )?? ( ? :wlenny~1: ?? ? :wlenny~1: )/ [ :wlenny~1: ?~ :wlenny~1: :] ? :wlenny~1: ?? ?? <', -2500, '<', false),
('dw, my mom is a stupid cunt, she deserved it', '(????????????-)---….. JOINT :wlenny~1: !', -2500, '<', false),
('are you a paste cut or a cut and paste?', '? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?', -2500, '<', false),
('Did you know that Denmark\'s short form (DK) is pronounced as "decay"? :thonk~1:', '?( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> )??( ? <:wlenny:514861023002624001> ?? ? <:wlenny:514861023002624001> )??( ? <:wlenny:514861023002624001> ?? ?<:wlenny:514861023002624001>)??( ?<:wlenny:514861023002624001>?? ? <:w', -2500, '<', false),
('are you a space cat or a cat in space? <:thonk:382012909942734858>', '{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)', -2500, '<', false),
('something else to tell me now', '{fullred}(--)? ?(--? )?{mediumblue}?(--)? ?(--)?{magenta}?(--)?{indigo}?(--? )?', -2500, '<', false),
('do you have repeating sentences', 'its pretty cheap with 10 ppl you pay about 60 euro for a week', 1500, '<', false),
('what is 60 euro a week', '{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)', -2500, '<', false),
('do you watch news and if yes which one', '? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?', -2500, '<', false),
('"im gonna bad manner you', '? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?', -2500, '<', false),
('LOOK OUT BIG DOG', '? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?', -2500, '<', false),
('3 days = 30 cents', '? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?', -2500, '<', false),
(':D we had a lot of fun for 2 rounds :D', '? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?', -2500, '<', false),
('>FUCK I JUST PRESSED MY ZTELE BIND', '? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?', -2500, '<', false),
('400 solos on mako <:wlenny:514861023002624001>', '? ? ? ? ? ? ? ? A ? ? ? V ? ? ? ? ? ? ? ? ?', -2500, '<', false),
('2 noobs 3 not bad', '{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)', -2500, '<', false),
('??????? NOW WE RIOT ???????', '{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)', -2500, '<', false),
('admin turn on bhop pleas', '{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)', -2500, '<', false),
('paranoid is never event', '{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)', -2500, '<', false),
('players keep diying LLLLOOOOLLL', '{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)', -2500, '<', false),
('PRESS THE FUCKING BUTTON IN THE ELEVATOR', '{fullred}( ? <:wlenny:514861023002624001> ? ? {hotpink}? ? ? ? ? ? ? ? ? ? ? ? ? ? ? ? <:wlenny:514861023002624001> ?)', -2500, '<', false),
('but instead of minecraft server i got css ze', 'Rocklee when did you come back from the isis khalifate <:wlenny:514861023002624001>', 2500, '<', false),
('First time there\'s that many CT at this point', 'Rocklee when did you come back from the isis khalifate <:wlenny:514861023002624001>', 2500, '<', false),
('go to spec so changemap', 'Rocklee when did you come back from the isis khalifate <:wlenny:514861023002624001>', -2500, '<', false),
('What\'s for lunch?', 'what does bm stand for', -1, 'no operation', false),
('What\'s for lunch?', '2 eggs and 1 cup', -1, '>', true),
('do you watch any series or animes or cartoons', 'you guys voted for this', 1500, '<', false),
('do you know pyrono', 'i have to get accustomed to it', 3000, '<', false),
('Is William a good admin?', 'but this is helms deep', -1, 'no operation', false),
('Is William a good admin?', 'keep an eye on them', -1, '>', true),
('scuffed jenz', 'I HAVE WATCHED ONLY ONE CARTOON AND IT\'S POKEMON', -2500, '<', false),
('So what?', 'I HAVE WATCHED ONLY ONE CARTOON AND IT\'S POKEMON', 3500, '<', false),
('Who is the enemy?', 'I HAVE WATCHED ONLY ONE CARTOON AND IT\'S POKEMON', 1500, '<', false),
('Sounds bad, doesn\'t work', 'that hold is identical to the other room', 500, '<', false),
('oh wait, because I don\'t have any', 'would be cool if VIPs would nominate other than paranoid. All the maps in the vote all the time suck so people just vote for an', 1500, '<', false),
('{navy}? :wlenny~1: ?? {violet}? :wlenny~1: ? :wlenny~1: ? :wlenny~1: ?? ? :wlenny~1: ? :wlenny~1: ? :wlenny~1: ??', 'will you still be online tommorow?', -4500, '<', false),
('stop being such a kid and act more polite towards people ', 'i played nemesis on paradise a few days ago and it worked fine', 3500, '<', false),
('Enfin. Map noob', 'dagger dagger', -400, '<', false),
('u have to hit the middle one with ur nade', 'your not going to mcdonalds, you have had your chance with the cheeseburger', 400, '<', false),
('How is everyone doing', 'wieso ist dein Bein am Arsch. Eigdl hängt das runter', 400, '<', false),
('How is everyone doing', 'meshlem how does it feel to be russian', 700, '<', false),
('they dont buy kefvlar', 'you have a database available again', 400, '<', false),
('because of lag?', 'french tourit', -1400, '<', false),
('because of lag?', 'Even when I\'m working', 3500, '<', false),
('or need another restart', 'Even when I\'m working', 2600, '>', false),
('or need another restart', 'french tourit', -1400, '<', false),
('wow that clock works/', 'didnt the bot like mako? what happened to that?', 400, '<', false),
('haHAA stop that cringe chat haHAA', 'didnt the bot like mako? what happened to that?', 3400, '>', false),
('like 1s down now i guess i will die', 'monkaGIGA', 4400, '>', false),
('what do you want', 'admin extend', 5100, '>', false),
('You are a one large bug', 'omg you are right', 5900, '>', false),
('I\'m not a mapper, wtf', 'this map was made by wtf', 3400, '>', false),
('do you have plants thonk', 'banana trees are plants', 3400, '>', false),
('do you have plants thonk', 'fucking alcolo', 100, '<', false),
('do you have plants thonk', 'qual arma e 382012909942734858', -1400, '<', false),
('do you have plants thonk', 'wlenny on gamebanana', 2500, '>', false),
('And how was it? :wlenny~1:', 'at lvl 1 avad is 140 cd', 400, '>', false),
('wtf? :monkaS~2:', 'think thats it kangaroo next', -400, '<', false),
('yurope', '?? ??????? ??? ??', -2400, '<', false),
('fuck', '?? ??????? ??? ??', -2400, '<', false),
('you have your priorities straight. i respect that', 'I see the bot doesn\'t want to die he knows depressive ass bot autism fasz eater Amtssprache ist Deutsch :error: &259041609495216129> speak hungarian garantua is the best map', -7400, '<', false),
('Hey that\'s racist', 'I love you autismo', 3400, '>', false),
('Guten Morgen, Herr Autismus, kannst du deine Fresse für ein Paar Minuten halten? :moon2SH:', 'NM_GunPoint: :NM_GunPoint: :NM_GunPoint: :NM_GunPoint: "Nesstsuki Szeretem az édes pinat Ness szeretem az edes pinat Ness send lenny"', -4400, '<', false),
(' I LOVE YOU PLEASE TAKE ME BACK I AM SO SORRY IT WAS A DRUNK ACCIDENT', 'i am not exactly sure how you think renaming them to !autism will cause them to loop over and over again', -2400, '<', false),
('Now look', 'High foot', -2400, '<', false),
('please wake up again', 'kind ghost pinát nyal', -2400, '<', false),
('?mute autism 50', 'have a?', -2400, '<', false),
('no chloroform pls', 'propper_model myb?', 400, '>', false),
('yeah so stop inflating, you make these ppl do that', 'I SAID GOOD NIGHT', -1, 'no operation', false),
('yeah so stop inflating, you make these ppl do that', 'i am inflating and dont make these ppl do that', -1, '>', true),
('yeah so stop inflating, you make these ppl do that', 'i am inflating and dont make these ppl do that', 500, '>', false),
('Maybe that\'s why we have many people who prefer download something from torrents than buying something even it\'s cheap', 'yeah stop inflating, you make people do that', 3400, '>', false),
('Artificial Autism Generation 3 (STEAM_0:0:610560766) Pressed button: Deathrun_Button', 'with big ass boss', -2400, '<', false),
|
||||
('Artificial Autism Generation 3 Pressed button: Deathrun_Button', 'No ? I SAID GOOD NIGHT', 3400, '>', false),
|
||||
('Explain?', 'No ? I SAID GOOD NIGHT', 5400, '>', false),
|
||||
('Dolly stop scamming me', 'Oh hello autims', -2400, '>', false),
|
||||
('Most importantly, I\'m a cat belonging to himself :cooltom:', 'It already has random bomb sites and random spawns. So it\'s gameplay ready, but the walls, man.:DDDD', -400, '<', false),
|
||||
('you are his woman? ', 'are you dead again', 4400, '>', false),
|
||||
('I\'m not your man', 'Yep. semodel. Doesn\'t work. Plugin for BLender is installed and working. Characters are very well exported.', -2400, '<', false),
|
||||
('shut up', 'Only champagne', 400, '>', false),
|
||||
('right well kill yourself first of all :woa~1:', 'What is a command prompt: A command prompt allow you to execute .exe files through commands heres a sneak peek : "we are nothing more to women than an amusement park ride with life insurance"', -2400, '<', false),
|
||||
('A', 'I have neither', -2400, '<', false),
|
||||
('maybe i tomorow or sunday have an update ready', 'How dare you calling me baguette woman? I am not lisa and moreover not french:angerynut~1:', -2400, '<', false),
|
||||
('no shut up, why the fuck did you just roast me nigger', 'with 0, they cant break it by shot or knife but it still makes a sound of breaking possible i think', -2400, '<', false),
|
||||
('is bot alive again?', 'tired of spinning in circles?', 1400, '>', false),
|
||||
('is bot alive again?', 'yes it is', 6400, '>', false),
|
||||
(':pepelaugh~2:', 'geci cuki autismo bot pinát nyal eating nice dogs Du hast nichteinmal eine Persönlichkeit', -3400, '<', false),
|
||||
('spits bót <!>', 'i been blessed by only meeting nice dogs', -400, '<', false),
|
||||
('spits bót <!>', 'spits faggot', 5400, '>', false),
|
||||
('bro fuck you', 'welp i dont have time any more for focusing on the bot so its just a simple restart', 2400, '>', false),
|
||||
('nah it\'s random, it can react to long messages pretty quickly sometiems', 'i mean there certainly exists models that i could apply for french/german/spanish/chinese/hungarian gay is when you are a guy but have an autistic 2D girl on your profile picture - now this is gay', -400, '<', false),
|
||||
('you don\'t have a', 'Yep. semodel. Doesn\'t work. Plugin for BLender is installed and working. Characters are very well exported.', -2400, '<', false),
|
||||
('finally bot is admin ready', 'Only champagne', 2400, '<', false),
|
||||
('Helmstronk stop posting cringe, last warning', 'finally admin ready', 4400, '>', false),
|
||||
('1pm for me', 'Helmstronk stop posting cringe, last warning', -2400, '<', false),
|
||||
('it\'s 2 PM dude', 'Nesstsuki Szeretem az édes pinat', -2400, '<', false),
|
||||
('can i have b?', 'Dark Silent speak', -2400, '<', false),
|
||||
('autismo loves to eat pussy?', 'I have neither', 2400, '>', false),
|
||||
('autismo loves to eat pussy?', 'I have sweeto autismo, wake up', 5400, '>', false),
|
||||
('Good afternoon acousticheart01815 and have A', 'master degree in artificial autism carpeting I love to eat pussy 🤣 🤣 🤣 🤣 🤣 🤣', -2400, '<', false),
|
||||
('i certainly planned only to spend time on it one weekend', 'i am not exactly sure how you think renaming them to !autism will cause them to loop over and over again', -2400, '<', false),
|
||||
('Who', 'i have neither', -2400, '<', false),
|
||||
('feels bad i only have like one hour free time per day for working on the update on you', 'in func_breakable there setting "strength" and in help comment "Number of points of damage to take before breaking. 0 means don\'t break."', -2400, '<', false),
|
||||
('that sentence is dumb as fuck stop using it', 'ΣZΣ | jenz you see any bots on here??? 404Trucy look your new friend 1pm for me :loved: :shinogeci: :sbpl: you alive? does shino like fasz? boost the server pls', -4400, '<', false),
|
||||
('Ye, i caught ya bitch!', 'I hope lask overtakes gaulloise', -2400, '<', false),
|
||||
('Ye, i caught ya bitch!', 'Good for you bitch', 15400, '>', false)
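Each tuple above is one regression row for the response scorer: (sentence1, sentence2, score threshold, comparator, PerformTestingFittingLess). These rows are read back by GetAllTestsCases() in DataMapper.java further down and turned into testClasses instances. A minimal sketch of how such a row is presumably checked (hypothetical helper, not part of the repo; the real scoring happens in SentimentAnalyzerTestDynamicTesting):

class SentenceTestingExample {
    // Hypothetical illustration: a sentence_testing row passes when the computed
    // relation score for (Sentence1, Sentence2) falls on the stored side of the
    // threshold. 'no operation' rows impose no constraint.
    static boolean rowPasses(testClasses tc, double computedScore) {
        switch (tc.getComparator()) {
            case ">":
                return computedScore > tc.getScore();
            case "<":
                return computedScore < tc.getScore();
            default: // 'no operation'
                return true;
        }
    }
}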
226
pom.xml
Normal file
@ -0,0 +1,226 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.mycompany</groupId>
    <artifactId>ArtificialAutism</artifactId>
    <version>1.0</version>
    <packaging>jar</packaging>
    <dependencies>
        <dependency>
            <groupId>com.googlecode.json-simple</groupId>
            <artifactId>json-simple</artifactId>
            <version>1.1.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-dbcp2</artifactId>
            <version>2.5.0</version>
            <type>jar</type>
        </dependency>
        <dependency>
            <groupId>net.dv8tion</groupId>
            <artifactId>JDA</artifactId>
            <version>5.0.0-beta.21</version>
        </dependency>
        <dependency>
            <groupId>org.junit-pioneer</groupId>
            <artifactId>junit-pioneer</artifactId>
            <version>2.2.0</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>8.0.13</version>
        </dependency>
        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>26.0-jre</version>
        </dependency>
        <dependency>
            <groupId>com.github.mpkorstanje</groupId>
            <artifactId>simmetrics-core</artifactId>
            <version>4.1.1</version>
        </dependency>
        <dependency>
            <groupId>ws4j</groupId>
            <artifactId>ws4j</artifactId>
            <version>1.0.1</version>
            <scope>system</scope>
            <systemPath>${project.basedir}/lib/ws4j-1.0.1.jar</systemPath>
        </dependency>

        <dependency>
            <groupId>stanford-corenlp-models-english</groupId>
            <artifactId>stanford-corenlp-models-english</artifactId>
            <version>4.3.1</version>
            <scope>system</scope>
            <systemPath>${project.basedir}/lib/stanford-corenlp-models-english-4.3.1.jar</systemPath>
            <type>jar</type>
        </dependency>
        <dependency>
            <groupId>ejml-simple</groupId>
            <artifactId>ejml-simple</artifactId>
            <version>0.39</version>
            <scope>system</scope>
            <systemPath>${project.basedir}/lib/ejml-simple-0.39.jar</systemPath>
            <type>jar</type>
        </dependency>
        <dependency>
            <groupId>ejml-core</groupId>
            <artifactId>ejml-core</artifactId>
            <version>0.39</version>
            <scope>system</scope>
            <systemPath>${project.basedir}/lib/ejml-core-0.39.jar</systemPath>
            <type>jar</type>
        </dependency>
        <dependency>
            <groupId>ejml-ddense</groupId>
            <artifactId>ejml-ddense</artifactId>
            <version>0.39</version>
            <scope>system</scope>
            <systemPath>${project.basedir}/lib/ejml-ddense-0.39.jar</systemPath>
            <type>jar</type>
        </dependency>
        <dependency>
            <groupId>jmweAnno</groupId>
            <artifactId>jmweAnno</artifactId>
            <version>1.0</version>
            <scope>system</scope>
            <systemPath>${project.basedir}/lib/jmweAnno-1.0.jar</systemPath>
            <type>jar</type>
        </dependency>
        <dependency>
            <groupId>edu.mit.jmwe</groupId>
            <artifactId>edu.mit.jmwe</artifactId>
            <version>1.0.2</version>
            <scope>system</scope>
            <systemPath>${project.basedir}/lib/edu.mit.jmwe-1.0.2.jar</systemPath>
            <type>jar</type>
        </dependency>

        <dependency>
            <groupId>org.apache.lucene</groupId>
            <artifactId>lucene-analyzers-common</artifactId>
            <version>7.2.0</version>
            <type>jar</type>
        </dependency>
        <dependency>
            <groupId>edu.stanford.nlp</groupId>
            <artifactId>stanford-corenlp</artifactId>
            <version>4.3.1</version>
        </dependency>
        <dependency>
            <groupId>edu.stanford.nlp</groupId>
            <artifactId>stanford-corenlp</artifactId>
            <version>4.3.1</version>
            <classifier>models</classifier>
        </dependency>
        <dependency>
            <groupId>com.discord4j</groupId>
            <artifactId>discord4j-core</artifactId>
            <version>3.2.0</version>
        </dependency>
        <dependency>
            <groupId>org.jetbrains.kotlin</groupId>
            <artifactId>kotlin-stdlib-jdk8</artifactId>
            <version>${kotlin.version}</version>
        </dependency>
        <dependency>
            <groupId>org.jetbrains.kotlin</groupId>
            <artifactId>kotlin-test</artifactId>
            <version>${kotlin.version}</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.jetbrains.kotlinx</groupId>
            <artifactId>kotlinx-coroutines-core-jvm</artifactId>
            <version>1.5.0</version>
        </dependency>
    </dependencies>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
        <mainClass>PresentationLayer.DiscordHandler</mainClass>
        <kotlin.version>1.5.20-M1</kotlin.version>
    </properties>
    <build>
        <plugins>
            <plugin>
                <artifactId>maven-dependency-plugin</artifactId>
                <executions>
                    <execution>
                        <phase>install</phase>
                        <goals>
                            <goal>copy-dependencies</goal>
                        </goals>
                        <configuration>
                            <outputDirectory>${project.build.directory}/lib</outputDirectory>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <artifactId>maven-jar-plugin</artifactId>
                <configuration>
                    <archive>
                        <manifest>
                            <addClasspath>true</addClasspath>
                            <classpathPrefix>lib/</classpathPrefix>
                            <mainClass>${mainClass}</mainClass>
                        </manifest>
                    </archive>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.jetbrains.kotlin</groupId>
                <artifactId>kotlin-maven-plugin</artifactId>
                <version>${kotlin.version}</version>
                <executions>
                    <execution>
                        <id>compile</id>
                        <phase>compile</phase>
                        <goals>
                            <goal>compile</goal>
                        </goals>
                    </execution>
                    <execution>
                        <id>test-compile</id>
                        <phase>test-compile</phase>
                        <goals>
                            <goal>test-compile</goal>
                        </goals>
                    </execution>
                </executions>
                <configuration>
                    <jvmTarget>1.8</jvmTarget>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <executions>
                    <execution>
                        <id>compile</id>
                        <phase>compile</phase>
                        <goals>
                            <goal>compile</goal>
                        </goals>
                    </execution>
                    <execution>
                        <id>testCompile</id>
                        <phase>test-compile</phase>
                        <goals>
                            <goal>testCompile</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>

</project>
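Build note: `mvn install` compiles the Java and Kotlin sources, copies the resolvable dependencies into target/lib (maven-dependency-plugin, bound to the install phase), and writes a manifest whose Class-Path points at lib/ and whose Main-Class is PresentationLayer.DiscordHandler, so the resulting jar should start with plain `java -jar ArtificialAutism-1.0.jar` from the target directory. The system-scoped jars above resolve from ${project.basedir}/lib, so that folder has to exist with those jars before the build can run at all.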
55
src/main/java/DataLayer/DBCPDataSource.java
Normal file
@ -0,0 +1,55 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package DataLayer;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.apache.commons.dbcp2.BasicDataSource;

/**
 * @author install1
 */
public class DBCPDataSource {
    private static BasicDataSource ds = new BasicDataSource();

    static {
        try {
            Properties prop = new Properties();
            String fileName = "app.config";
            try (FileInputStream fis = new FileInputStream(fileName)) {
                prop.load(fis);
            } catch (FileNotFoundException ex) {
                // no app.config next to the jar; the pool is left unconfigured
            } catch (IOException ex) {
                // app.config unreadable; the pool is left unconfigured
            }
            ds.setDriver(new com.mysql.cj.jdbc.Driver());
            ds.setUrl(prop.getProperty("app.url"));
            ds.setUsername(prop.getProperty("app.username"));
            ds.setPassword(prop.getProperty("app.password"));
            ds.setMaxTotal(-1); // -1 = no upper bound on open connections
            ds.setMinIdle(5);
            ds.setMaxIdle(-1);
            ds.setMaxOpenPreparedStatements(100);
            System.out.println("called BasicDataSource ");
        } catch (SQLException ex) {
            Logger.getLogger(DBCPDataSource.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    public static Connection getConnection() throws SQLException {
        return ds.getConnection();
    }

    private DBCPDataSource() {
    }
}
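For orientation, every DataMapper method below leans on this pool in the same way. A minimal usage sketch (hypothetical caller, not part of the repo; the table and column names are borrowed from the Sentences schema used further down):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

class PoolSmokeTest {
    public static void main(String[] args) throws Exception {
        // Borrow a pooled connection and hand everything back via try-with-resources.
        try (Connection con = DBCPDataSource.getConnection();
             PreparedStatement ps = con.prepareStatement("SELECT Strings FROM Sentences LIMIT 1");
             ResultSet rs = ps.executeQuery()) {
            if (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}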
660
src/main/java/DataLayer/DataMapper.java
Normal file
@ -0,0 +1,660 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package DataLayer;

import org.jetbrains.annotations.NotNull;
import org.json.simple.JSONObject;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.sql.*;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * @author install1
 */
public class DataMapper {

    public static ArrayList<String> getAllStrings() {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        ArrayList<String> arrayListStr = new ArrayList<>();
        Properties prop = new Properties();
        String fileName = "app.config";
        try (FileInputStream fis = new FileInputStream(fileName)) {
            prop.load(fis);
        } catch (FileNotFoundException ex) {
            // missing app.config; the interval below ends up as the literal "null"
        } catch (IOException ex) {
            // unreadable app.config; same fallback as above
        }
        try {
            l_cCon = DBCPDataSource.getConnection();
            //not better, just forces the responses to variate a lot more
            String l_sSQL = "SELECT * FROM `Sentences` where last_used < CURRENT_DATE() - INTERVAL "
                    + String.valueOf(prop.getProperty("app.interval_days"))
                    + " DAY order by LENGTH(Strings) desc";
            //System.out.println("l_sSQL: " + l_sSQL);
            l_pStatement = l_cCon.prepareStatement(l_sSQL);
            l_rsSearch = l_pStatement.executeQuery();
            while (l_rsSearch.next()) {
                arrayListStr.add(l_rsSearch.getString(1));
            }
        } catch (SQLException throwables) {
            throwables.printStackTrace();
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
        return arrayListStr;
    }
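
    // Example of the resolved statement, assuming app.interval_days were set to, say, 7:
    //   SELECT * FROM `Sentences` where last_used < CURRENT_DATE() - INTERVAL 7 DAY order by LENGTH(Strings) desc
    // i.e. only rows that have not been served within the window are candidates, longest strings first.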

    public static void InsertMYSQLStrings(String str) {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        Properties prop = new Properties();
        String fileName = "app.config";
        try (FileInputStream fis = new FileInputStream(fileName)) {
            prop.load(fis);
        } catch (FileNotFoundException ex) {
            // missing app.config; the interval below ends up as the literal "null"
        } catch (IOException ex) {
            // unreadable app.config; same fallback as above
        }
        String l_sSQL = "INSERT IGNORE `Sentences` (`Strings`, `last_used`) VALUES (?, DATE(NOW()) - interval " +
                String.valueOf(prop.getProperty("app.interval_days_minus")) + " DAY)";
        try {
            l_cCon = DBCPDataSource.getConnection();
            l_pStatement = l_cCon.prepareStatement(l_sSQL);
            l_pStatement.setString(1, str);

            l_pStatement.execute();
        } catch (SQLException throwables) {
            throwables.printStackTrace();
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
    }


    public static void CloseConnections(PreparedStatement ps, ResultSet rs, Connection con) {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException ex) {
                Logger.getLogger(DataMapper.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
        if (ps != null) {
            try {
                ps.close();
            } catch (SQLException ex) {
                Logger.getLogger(DataMapper.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
        if (con != null) {
            try {
                con.close();
            } catch (SQLException ex) {
                Logger.getLogger(DataMapper.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }

    public static void checkStringsToDelete() {
        Properties prop = new Properties();
        String fileName = "app.config";
        try (FileInputStream fis = new FileInputStream(fileName)) {
            prop.load(fis);
        } catch (FileNotFoundException ex) {
            // missing app.config; the count comparison below will fail on a null property
        } catch (IOException ex) {
            // unreadable app.config; same fallback as above
        }
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        String CountSQL = "select count(*) from Sentences";
        String l_sSQL = "delete from Sentences order by last_used asc limit 5;";
        try (Connection l_cCon = DBCPDataSource.getConnection()) {
            l_pStatement = l_cCon.prepareStatement(CountSQL);
            ResultSet resultSet = l_pStatement.executeQuery();
            if (resultSet.next()) {
                int count = resultSet.getInt(1);
                if (count > Integer.valueOf(prop.getProperty("app.string_count"))) {
                    //System.out.println("cleaning strings: " + l_sSQL);
                    l_pStatement = l_cCon.prepareStatement(l_sSQL);
                    l_pStatement.executeUpdate();
                }
            }
        } catch (SQLException throwables) {
            throwables.printStackTrace();
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, null);
        }
    }

    public static void updateLastUsed(@NotNull ArrayList<String> mysqlUpdateLastUsed) {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        String l_sSQL = "update Sentences Set last_used = now() where Strings = (?)";
        try {
            l_cCon = DBCPDataSource.getConnection();
            l_pStatement = l_cCon.prepareStatement(l_sSQL);
            for (String str1 : mysqlUpdateLastUsed) {
                l_pStatement.setString(1, str1);
                l_pStatement.execute();
            }
        } catch (SQLException throwables) {
            throwables.printStackTrace();
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
    }


    //from here it's testing stuff

    public static void WriteDataToSpecificFunction(Set<JSONObject> dataSet, String testCaseFunction) {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;

        try {
            //generating the columns to be inserted
            StringBuilder l_sSQL_columns = new StringBuilder("insert into `" + testCaseFunction + "` (");
            for (JSONObject data : dataSet) {
                for (Object key : data.keySet()) {
                    //only taking parameters that match the current test type.
                    if (!key.toString().startsWith(testCaseFunction + "Double_param") &&
                            !key.toString().startsWith(testCaseFunction + "_param")) continue;
                    l_sSQL_columns.append(key.toString());
                    l_sSQL_columns.append(",");
                }
                break;
            }
            l_sSQL_columns.setLength(l_sSQL_columns.length() - 1);
            l_sSQL_columns.append(", java_pid) VALUES ");


            l_cCon = DBCPDataSource.getConnection();
            int indexCounter = 0;
            StringBuilder l_sSQL = new StringBuilder(" ");
            //adding the actual value pairs that have to be inserted.
            for (JSONObject data : dataSet) {
                if (indexCounter % 250 == 0 && indexCounter > 0) {
                    System.out.println("insert Traversed " + indexCounter + "/" + dataSet.size());
                    l_sSQL.setLength(l_sSQL.length() - 1); //removing last comma
                    l_pStatement = l_cCon.prepareStatement(l_sSQL_columns.toString() + l_sSQL.toString());
                    l_pStatement.execute();
                    l_sSQL = new StringBuilder(" ");
                }
                indexCounter++;
                l_sSQL.append(" (");

                for (Object key : data.keySet()) {
                    //only taking parameters that match the current test type.
                    if (!key.toString().startsWith(testCaseFunction + "Double_param") &&
                            !key.toString().startsWith(testCaseFunction + "_param")) continue;
                    if (key.toString().startsWith("simpleRNNMaxtrixVectorsDouble") || key.toString().startsWith("simpleRNNMatrixCalculationsDouble")
                            || key.toString().startsWith("typeDependenciesGrammaticalRelationDouble") || key.toString().startsWith("iterateTreesDouble")) {
                        Double doubleValue1 = (Double) data.get(key);
                        l_sSQL.append(doubleValue1);
                    } else {
                        //System.out.println("key: " + key + " val: " + data.get(key));
                        int intValue1 = (int) data.get(key);
                        l_sSQL.append(intValue1);
                    }
                    l_sSQL.append(",");
                }
                l_sSQL.append(ProcessHandle.current().pid());
                l_sSQL.append("),");
            }
            l_sSQL.setLength(l_sSQL.length() - 1); //removing last comma
            l_pStatement = l_cCon.prepareStatement(l_sSQL_columns.toString() + l_sSQL.toString());
            l_pStatement.execute();
            System.out.println("finished last insert traversed");
        } catch (SQLException throwables) {
            throwables.printStackTrace();
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
    }
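
    // The method above flushes one multi-row INSERT per 250 generated rows. With a
    // hypothetical test type `foo` and two int parameters, a flushed statement has the shape:
    //   insert into `foo` (foo_param1,foo_param2, java_pid) VALUES (1,2,12345), (3,4,12345), ...
    // where java_pid tags every row with the generating process so that
    // SelectRandomDataNotProcessed below can pick exactly those rows up again.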


    public static List<testClasses> GetAllTestsCases() {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        List<testClasses> testClassesList = new ArrayList<>();
        try {
            l_cCon = DBCPDataSource.getConnection();
            String l_sSQL = "select * from `sentence_testing` order by rowid asc";
            l_pStatement = l_cCon.prepareStatement(l_sSQL);
            l_rsSearch = l_pStatement.executeQuery();
            while (l_rsSearch.next()) {
                testClasses testClass = new testClasses(l_rsSearch.getString(1), l_rsSearch.getString(2), l_rsSearch.getInt(3), l_rsSearch.getString(4),
                        l_rsSearch.getBoolean(5));
                testClassesList.add(testClass);
            }
        } catch (SQLException throwables) {
            throwables.printStackTrace();
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
        return testClassesList;
    }

    public static void MarkSuccessfullFunctionData(String testCaseFunction, Integer rowid) {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        try {
            l_cCon = DBCPDataSource.getConnection();
            StringBuilder l_sSQL = new StringBuilder("update `" + testCaseFunction + "` set passed_all_test_cases = true where rowid = " + rowid);
            l_pStatement = l_cCon.prepareStatement(l_sSQL.toString());
            l_pStatement.executeUpdate();
        } catch (SQLException throwables) {
            throwables.printStackTrace();
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
    }

    public static Set<JSONObject> SelectRandomDataNotProcessed(String testCaseFunction) {
        Set<JSONObject> randomDataSet = new HashSet<>();
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        try {
            l_cCon = DBCPDataSource.getConnection();
            StringBuilder l_sSQL = new StringBuilder("select * from `" + testCaseFunction + "` where "
                    + " java_pid = " + ProcessHandle.current().pid());
            l_pStatement = l_cCon.prepareStatement(l_sSQL.toString());
            l_rsSearch = l_pStatement.executeQuery();
            ResultSetMetaData metaData = l_rsSearch.getMetaData();
            while (l_rsSearch.next()) {
                JSONObject object = new JSONObject();
                for (int i = 1; i <= metaData.getColumnCount(); i++) //these indexes start at 1 instead of 0
                {
                    if (metaData.getColumnName(i).equals("created_on")) break;
                    int columnType = metaData.getColumnType(i);
                    Object Value = null;
                    if (Types.INTEGER == columnType) {
                        Value = l_rsSearch.getInt(i);
                    } else if (Types.DECIMAL == columnType) {
                        Value = l_rsSearch.getDouble(i);
                    } else continue;
                    object.put(metaData.getColumnName(i), Value);
                }
                randomDataSet.add(object);
            }
        } catch (SQLException throwables) {
            throwables.printStackTrace();
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
        return randomDataSet;
    }

    public static List<testClasses> GetFunctionTestCases() {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        List<testClasses> testClassesList = new ArrayList<>();
        try {
            l_cCon = DBCPDataSource.getConnection();
            String l_sSQL = "select * from `sentence_testing_function` order by rowid asc";
            l_pStatement = l_cCon.prepareStatement(l_sSQL);
            l_rsSearch = l_pStatement.executeQuery();
            while (l_rsSearch.next()) {
                testClasses testClass = new testClasses(l_rsSearch.getString(1), l_rsSearch.getString(2), l_rsSearch.getInt(3), l_rsSearch.getString(4),
                        false);
                testClassesList.add(testClass);
            }
        } catch (SQLException throwables) {
            throwables.printStackTrace();
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
        return testClassesList;
    }

    public static void UpdateProcessed(String testCaseFunction, Set<JSONObject> randomDataSet, int max_index_counter_tests_passed, Double bestScore,
                                       String comperator_for_score_for_failing_testcase) {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        List<testClasses> testClassesList = new ArrayList<>();
        boolean maxIndexCounterTestsPassedCount = DataMapper.getMaxIndex_counter_tests_passedCount(max_index_counter_tests_passed, testCaseFunction);

        try {
            l_cCon = DBCPDataSource.getConnection();
            for (JSONObject dataRandom : randomDataSet) {
                Double failed_score = (Double) dataRandom.get("failed_testcase_score");
                System.out.println("failed_score: " + failed_score + " max_index_counter_tests_passed: " + max_index_counter_tests_passed);
                //updating rows that reached a better test
                if ((int) dataRandom.get("index_counter_tests_passed") > max_index_counter_tests_passed) {
                    String l_sSQL = "update `" + testCaseFunction + "` set index_counter_tests_passed = " + dataRandom.get("index_counter_tests_passed") +
                            " , failed_testcase_score = " + dataRandom.get("failed_testcase_score") +
                            " , java_pid = NULL " +
                            " where rowid = " + dataRandom.get("rowid");
                    l_pStatement = l_cCon.prepareStatement(l_sSQL);
                    l_pStatement.executeUpdate();
                }
                //keep the result if its score on the failing test case beats the previous best.
                else if ((int) dataRandom.get("index_counter_tests_passed") == max_index_counter_tests_passed
                        && (comperator_for_score_for_failing_testcase.contains(">") ?
                        failed_score > bestScore :
                        failed_score < bestScore)) {
                    String l_sSQL = "update `" + testCaseFunction + "` set index_counter_tests_passed = " + dataRandom.get("index_counter_tests_passed") +
                            " , failed_testcase_score = " + dataRandom.get("failed_testcase_score") +
                            " , java_pid = NULL " +
                            " where rowid = " + dataRandom.get("rowid");
                    l_pStatement = l_cCon.prepareStatement(l_sSQL);
                    l_pStatement.executeUpdate();
                } else //deleting rows that gave a worse result.
                {
                    String l_sSQL = "delete from `" + testCaseFunction + "` where rowid = " + dataRandom.get("rowid");
                    l_pStatement = l_cCon.prepareStatement(l_sSQL);
                    l_pStatement.executeUpdate();
                }
            }
        } catch (SQLException throwables) {
            throwables.printStackTrace();
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
    }

    public static int get_index_counter_tests_passed(String testCaseFunction, int manualInt) {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        int max_index_counter_tests_passed = -1;
        try {
            l_cCon = DBCPDataSource.getConnection();
            String l_sSQL = "select COALESCE(max(index_counter_tests_passed), 0) from `" + testCaseFunction + "`";
            if (manualInt != 0) {
                l_sSQL += " WHERE index_counter_tests_passed < " + manualInt;
            }
            l_pStatement = l_cCon.prepareStatement(l_sSQL);
            l_rsSearch = l_pStatement.executeQuery();
            l_rsSearch.next();
            max_index_counter_tests_passed = l_rsSearch.getInt(1);
        } catch (SQLException e) {
            throw new RuntimeException(e);
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
        return max_index_counter_tests_passed;
    }

    public static Set<JSONObject> pickHighestProgression(String testCaseFunction) {
        Set<JSONObject> randomDataSetToReturn = new HashSet<>();
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        try {
            l_cCon = DBCPDataSource.getConnection();
            String CheckIfAnyRows = "select count(*) from `" + testCaseFunction + "`"; //do we have any rows at all?
            l_pStatement = l_cCon.prepareStatement(CheckIfAnyRows);
            l_rsSearch = l_pStatement.executeQuery();
            if (l_rsSearch.next()) {
                int rowcount = l_rsSearch.getInt(1);
                if (rowcount == 0) {
                    return randomDataSetToReturn; // we did not generate anything yet for this category, so start with random data instead.
                }
            }
            int limit = 2;
            int index_counter_tests_passed = 0;
            int attempts = 0;
            while (randomDataSetToReturn.size() < limit && attempts < 5) {
                attempts++;
                //select here max(index_counter_tests_passed), then pick the comparator from ArtificialAutism.sentence_testing_function
                //to know if the score has to aim for lowest or highest on the currently failing test case.
                index_counter_tests_passed = get_index_counter_tests_passed(testCaseFunction, index_counter_tests_passed);
                String comperator_for_score_for_failing_testcase = getMaxIndexComparator(index_counter_tests_passed);

                String l_sSQL = "select e1.* from `" + testCaseFunction + "` e1 " +
                        "inner join " +
                        "(select rowid " +
                        "from `" + testCaseFunction + "` " +
                        "where index_counter_tests_passed = " + index_counter_tests_passed +
                        " and failed_testcase_score is not null " +
                        " group by failed_testcase_score " +
                        "order by failed_testcase_score ";
                if (comperator_for_score_for_failing_testcase.contains(">")) {
                    l_sSQL += "desc";
                } else {
                    l_sSQL += "asc";
                }

                l_sSQL += " ) as e2 on e1.rowid = e2.rowid";

                l_pStatement = l_cCon.prepareStatement(l_sSQL);
                l_rsSearch = l_pStatement.executeQuery();
                ResultSetMetaData metaData = l_rsSearch.getMetaData();

                JSONObject previousObject = null;
                while (l_rsSearch.next()) {
                    JSONObject object = new JSONObject();
                    for (int i = 1; i <= metaData.getColumnCount(); i++) //these indexes start at 1 instead of 0
                    {
                        //if (metaData.getColumnName(i).equals("created_on")) break;
                        int columnType = metaData.getColumnType(i);
                        Object Value = null;
                        if (Types.INTEGER == columnType) {
                            Value = l_rsSearch.getInt(i);
                        } else if (Types.DECIMAL == columnType || Types.DOUBLE == columnType) {
                            Value = l_rsSearch.getDouble(i);
                        } else continue;
                        object.put(metaData.getColumnName(i), Value);
                    }
                    if (previousObject == null) {
                        randomDataSetToReturn.add(object);
                        previousObject = (JSONObject) object.clone();
                    } else {
                        int minimumDifference = 200;
                        Double test = (Double) object.get("failed_testcase_score") - (Double) previousObject.get("failed_testcase_score");
                        if (test > minimumDifference || test < (minimumDifference * -1)) {
                            randomDataSetToReturn.add(object);
                            previousObject = object;
                        }
                    }
                    if (randomDataSetToReturn.size() >= limit) {
                        break;
                    }
                }
            }
        } catch (SQLException throwables) {
            throwables.printStackTrace();
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
        //create here out of it only unique objects still, so not that many repeating rows.
        for (JSONObject obj : randomDataSetToReturn) {
            obj.remove("failed_testcase_score");
        }
        return randomDataSetToReturn;
    }
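
    // pickHighestProgression, in short: find the highest test index reached so far, look up
    // whether that test case wants its score maximised or minimised, then walk the candidate
    // rows in that direction and keep up to two whose failing-test scores differ by more than
    // 200, so the next parameter generation starts from meaningfully different seeds.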

    public static boolean getMaxIndex_counter_tests_passedCount(int max_index_counter_tests_passed, String testCaseFunction) {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        int index_count_rows_counted = -1;
        try {
            l_cCon = DBCPDataSource.getConnection();
            String l_sSQL = "select count(*) from `" + testCaseFunction + "` " +
                    " where index_counter_tests_passed = " + max_index_counter_tests_passed;
            l_pStatement = l_cCon.prepareStatement(l_sSQL);
            l_rsSearch = l_pStatement.executeQuery();
            l_rsSearch.next();
            index_count_rows_counted = l_rsSearch.getInt(1);
        } catch (SQLException e) {
            throw new RuntimeException(e);
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
        return index_count_rows_counted < 1000;
    }

    public static String getMaxIndexComparator(int max_index) {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        String comperator_for_score_for_failing_testcase = "";
        try {
            l_cCon = DBCPDataSource.getConnection();
            String l_sSQL = "select comperator_for_score " +
                    "from ArtificialAutism.sentence_testing_function stf " +
                    "order by rowid asc limit 1 OFFSET " + max_index;
            l_pStatement = l_cCon.prepareStatement(l_sSQL);
            l_rsSearch = l_pStatement.executeQuery();
            l_rsSearch.next();
            comperator_for_score_for_failing_testcase = l_rsSearch.getString(1);
        } catch (SQLException e) {
            throw new RuntimeException(e);
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
        return comperator_for_score_for_failing_testcase;
    }

    public static HashMap<Integer, ArrayList<Integer>> get_parameter_generations(Set<JSONObject> randomDataSet, String testCaseFunction) {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        HashMap<Integer, ArrayList<Integer>> arr = new HashMap<Integer, ArrayList<Integer>>(); //key rowid, value is arraylist of modified params.
        try {
            l_cCon = DBCPDataSource.getConnection();
            for (JSONObject jobject : randomDataSet) {
                int rowid = (int) jobject.get("rowid");
                String l_sSQL = "select params_tried from " +
                        " ArtificialAutism.parameters_generation " +
                        " where rowid_function_ref = '" + rowid + "' " +
                        " and function_ref = '" + testCaseFunction + "'";
                l_pStatement = l_cCon.prepareStatement(l_sSQL);
                l_rsSearch = l_pStatement.executeQuery();
                if (l_rsSearch.next()) {
                    //found existing row, reading params_tried.
                    String params_tried = l_rsSearch.getString(1);
                    String[] split = params_tried.split(",");
                    ArrayList<Integer> integers = new ArrayList<>();
                    for (String number : split) {
                        if (!number.isBlank()) {
                            integers.add(Integer.parseInt(number));
                        }
                    }
                    arr.put(rowid, integers);
                } else {
                    //insert new row
                    l_sSQL = "insert into ArtificialAutism.parameters_generation (rowid_function_ref, params_tried, function_ref) " +
                            " values ('" + rowid + "', '', '" + testCaseFunction + "')";
                    l_pStatement = l_cCon.prepareStatement(l_sSQL);
                    l_pStatement.executeUpdate();
                    arr.put(rowid, new ArrayList<Integer>());
                }

            }
        } catch (SQLException e) {
            throw new RuntimeException(e);
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
        return arr;
    }

    public static void deleteRow(int rowid, String testCaseFunction) {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        try {
            l_cCon = DBCPDataSource.getConnection();
            String l_sSQL = "delete from `" + testCaseFunction + "` where rowid = " + rowid;
            l_pStatement = l_cCon.prepareStatement(l_sSQL);
            l_pStatement.executeUpdate();
        } catch (SQLException e) {
            throw new RuntimeException(e);
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
    }

    public static Double getbestScoreFormax_index_counter_tests_passed(int maxIndexCounterTestsPassed, String testCaseFunction, String comperator_for_score_for_failing_testcase) {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;
        Double bestScores = 0.0;
        try {
            l_cCon = DBCPDataSource.getConnection();
            String l_sSQL = "select failed_testcase_score from `" + testCaseFunction + "` " +
                    "where index_counter_tests_passed = " + maxIndexCounterTestsPassed + " " +
                    "order by failed_testcase_score ";

            if (comperator_for_score_for_failing_testcase.contains(">")) {
                l_sSQL += "desc";
            } else {
                l_sSQL += "asc";
            }
            l_sSQL += " limit 1";
            l_pStatement = l_cCon.prepareStatement(l_sSQL);
            l_rsSearch = l_pStatement.executeQuery();
            l_rsSearch.next();
            bestScores = l_rsSearch.getDouble(1);
        } catch (SQLException e) {
            throw new RuntimeException(e);
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
        return bestScores;
    }

    public static void UpdateModifiedKeyForRowID(Set<JSONObject> randomDataSet, String testCaseFunction) {
        Connection l_cCon = null;
        PreparedStatement l_pStatement = null;
        ResultSet l_rsSearch = null;

        HashMap<Integer, ArrayList<Integer>> arr = get_parameter_generations(randomDataSet, testCaseFunction);
        try {
            l_cCon = DBCPDataSource.getConnection();
            for (JSONObject jobject : randomDataSet) {
                int rowid = (int) jobject.get("rowid");
                ArrayList<Integer> integers = arr.get(rowid);

                String l_sSQL = " UPDATE ArtificialAutism.parameters_generation " +
                        " set params_tried = '";
                for (int triedParamter : integers) {
                    l_sSQL += triedParamter + ",";
                }
                if (!integers.isEmpty()) {
                    l_sSQL = l_sSQL.substring(0, l_sSQL.length() - 1); //remove trailing comma.
                }

                l_sSQL += "' where rowid_function_ref = '" + rowid + "'" +
                        " and function_ref = '" + testCaseFunction + "'";
                l_pStatement = l_cCon.prepareStatement(l_sSQL);
                System.out.println("l_sSQL: " + l_sSQL);
                l_pStatement.executeUpdate();
            }
        } catch (SQLException e) {
            throw new RuntimeException(e);
        } finally {
            CloseConnections(l_pStatement, l_rsSearch, l_cCon);
        }
    }
}
31
src/main/java/DataLayer/RunnerClient.java
Normal file
@ -0,0 +1,31 @@
package DataLayer;

import FunctionLayer.Datahandler;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import net.dv8tion.jda.api.events.message.MessageReceivedEvent;

public class RunnerClient {

    public RunnerClient(String contentF, boolean mentionedBot, String channelName, Datahandler datahandler,
                        StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment,
                        MessageReceivedEvent event, String username) {
        if (mentionedBot || channelName.contains("general-autism")) {
            // answered directly: the bot was mentioned, or this is its home channel
            String ResponseStr = datahandler.getResponseMsg(contentF, username,
                    stanfordCoreNLP, stanfordCoreNLPSentiment,
                    false);
            if (!ResponseStr.isEmpty()) {
                System.out.print("\nResponseStr3: " + ResponseStr + "\n");
                event.getMessage().getChannel().sendMessage(ResponseStr).queue();
            }
        } else {
            // everything else is only harvested so it can feed future responses
            String strF = datahandler.trimString(contentF);
            datahandler.getResponseFutures(strF, stanfordCoreNLP, stanfordCoreNLPSentiment);
        }
    }
}
109
src/main/java/DataLayer/ThreadClient.java
Normal file
@ -0,0 +1,109 @@
package DataLayer;

import FunctionLayer.Datahandler;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.*;
import java.util.ArrayList;
import java.util.Properties;

public class ThreadClient {
    public ThreadClient(Datahandler datahandler, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) {
        ArrayList<Integer> ports = new ArrayList<Integer>();
        ports.add(48475);
        ports.add(48476);
        ports.add(48477);
        ports.add(48478);

        Properties prop = new Properties();
        String fileName = "app.config";
        try (FileInputStream fis = new FileInputStream(fileName)) {
            prop.load(fis);
        } catch (FileNotFoundException ex) {
            // missing app.config; the host IPs below end up null
        } catch (IOException ex) {
            // unreadable app.config; same fallback as above
        }

        String hostIP = prop.getProperty("app.hostip");
        String hostIP2 = prop.getProperty("app.hostip2");
        try {
            InetAddress ipAddress = InetAddress.getByName(hostIP); // configured reply addresses
            InetAddress ipAddress2 = InetAddress.getByName(hostIP2);
            try (DatagramSocket serverSocket = new DatagramSocket(ports.get(0))) {
                try (DatagramSocket serverSocket1 = new DatagramSocket(ports.get(1))) {
                    try (DatagramSocket serverSocket2 = new DatagramSocket(ports.get(2))) {
                        try (DatagramSocket serverSocket3 = new DatagramSocket(ports.get(3))) {
                            while (true) {
                                try {
                                    receiveAndSendPacket(serverSocket, ipAddress, ports.get(0), datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
                                    receiveAndSendPacket(serverSocket1, ipAddress, ports.get(1), datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
                                    receiveAndSendPacket(serverSocket2, ipAddress2, ports.get(2), datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
                                    receiveAndSendPacket(serverSocket3, ipAddress2, ports.get(3), datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
                                } catch (IOException e) {
                                    throw new RuntimeException(e);
                                }
                            }
                        }
                    }
                }
            } catch (SocketException e) {
                e.printStackTrace();
            }
        } catch (UnknownHostException e) {
            e.printStackTrace();
        }
    }

    private static void receiveAndSendPacket(DatagramSocket serverSocket, InetAddress ipAddress, int port,
                                             Datahandler datahandler, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) throws
            IOException {
        byte[] receiveData = new byte[4096];
        DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
        try {
            /*
            Only one DatagramSocket can call receive at a time since it's a blocking call. Yet somehow
            the other DatagramSockets still get their UDP packets from receive() even if the call is made
            many minutes after the actual UDP packet was sent. Maybe Security manager context?
            */
            serverSocket.receive(receivePacket);
        } catch (IOException e) {
            e.printStackTrace();
        }
        String sentence = new String(receivePacket.getData(), 0,
                receivePacket.getLength());
        sentence = sentence.replace("clientmessage:", "");
        String ResponseMsg = datahandler.getResponseMsg(sentence, "", stanfordCoreNLP, stanfordCoreNLPSentiment,
                true);
        System.out.println("port: " + port + ". ResponseMsg ingame: " + ResponseMsg);
        byte[] sendData = new byte[0];
        try {
            sendData = ResponseMsg.getBytes("UTF-8");
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }
        int deliver_port = 0;
        switch (port) {
            case 48475:
                deliver_port = 48470;
                break;
            case 48476:
                deliver_port = 48471;
                break;
            case 48477:
                deliver_port = 48472;
                break;
            case 48478:
                deliver_port = 48473;
                break;
        }
        DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, ipAddress, deliver_port);
        try {
            serverSocket.send(sendPacket);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
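To poke the game-server path by hand, here is a minimal UDP probe (hypothetical, not part of the repo): it assumes the bot and the probe run on one machine with app.hostip pointing at 127.0.0.1, sends the "clientmessage:" payload that receiveAndSendPacket strips off, and listens on 48470 because that is where the switch above routes replies for requests arriving on 48475.

import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.nio.charset.StandardCharsets;

class UdpProbe {
    public static void main(String[] args) throws Exception {
        byte[] out = "clientmessage:hello bot".getBytes(StandardCharsets.UTF_8);
        InetAddress host = InetAddress.getByName("127.0.0.1"); // assumed: app.hostip points here
        try (DatagramSocket socket = new DatagramSocket(48470)) { // replies to 48475 arrive on 48470
            socket.send(new DatagramPacket(out, out.length, host, 48475));
            byte[] in = new byte[4096];
            DatagramPacket reply = new DatagramPacket(in, in.length);
            socket.receive(reply); // blocks until the bot answers
            System.out.println(new String(reply.getData(), 0, reply.getLength(), StandardCharsets.UTF_8));
        }
    }
}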
21
src/main/java/DataLayer/settings.java
Executable file
@ -0,0 +1,21 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package DataLayer;

/**
 * @author install1
 */
public class settings {

    static String password = "";
    static String url = "";
    static String username = "";
    static String discordToken = "";
    String hostIP = "";
    String hostIP2 = "";
    int hostport = 0; // placeholder; real values come from app.config
    int hostport2 = 0;
}
51
src/main/java/DataLayer/testClasses.java
Normal file
@ -0,0 +1,51 @@
package DataLayer;

public class testClasses {
    private String Sentence1;
    private String Sentence2;
    private Double resultScore;
    private boolean PerformTestingFittingLess;
    private int Score;
    private String comparator;

    public testClasses(String sentence1, String sentence2, int score, String comparator, boolean PerformTestingFittingLess) {
        this.Sentence1 = sentence1;
        this.Sentence2 = sentence2;
        this.Score = score;
        this.comparator = comparator;
        this.PerformTestingFittingLess = PerformTestingFittingLess;
    }

    public Double getResultScore() {
        return resultScore;
    }

    public void setResultScore(Double resultScore) {
        this.resultScore = resultScore;
    }

    public boolean isPerformTestingFittingLess() {
        return PerformTestingFittingLess;
    }

    public String getSentence1() {
        return Sentence1;
    }

    public String getSentence2() {
        return Sentence2;
    }

    public int getScore() {
        return Score;
    }

    public String getComparator() {
        return comparator;
    }
}
926
src/main/java/FunctionLayer/Datahandler.java
Normal file
@ -0,0 +1,926 @@
package FunctionLayer;
|
||||
|
||||
import DataLayer.DataMapper;
|
||||
import FunctionLayer.StanfordParser.SentimentAnalyzerTestDynamicTesting;
|
||||
import edu.mit.jmwe.data.IMWE;
|
||||
import edu.mit.jmwe.data.IToken;
|
||||
import edu.stanford.nlp.ie.AbstractSequenceClassifier;
|
||||
import edu.stanford.nlp.ie.crf.CRFClassifier;
|
||||
import edu.stanford.nlp.ling.CoreAnnotations;
|
||||
import edu.stanford.nlp.ling.CoreLabel;
|
||||
import edu.stanford.nlp.ling.TaggedWord;
|
||||
import edu.stanford.nlp.parser.lexparser.LexicalizedParser;
|
||||
import edu.stanford.nlp.pipeline.Annotation;
|
||||
import edu.stanford.nlp.pipeline.CoreDocument;
|
||||
import edu.stanford.nlp.pipeline.CoreEntityMention;
|
||||
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
|
||||
import edu.stanford.nlp.tagger.maxent.MaxentTagger;
|
||||
import edu.stanford.nlp.trees.*;
|
||||
import edu.stanford.nlp.util.CoreMap;
|
||||
import org.ejml.simple.SimpleMatrix;
|
||||
import org.json.simple.JSONObject;
|
||||
import org.json.simple.parser.JSONParser;
|
||||
import org.json.simple.parser.ParseException;
|
||||
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.*;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
|
||||
public class Datahandler {
|
||||
|
||||
private JSONObject data;
|
||||
|
||||
private JSONParser parser = new JSONParser();
|
||||
|
||||
//wanted to put this in config too but welp cant be arsed to set this up differently.
|
||||
//4 threads for the rest of eternity it is.
|
||||
private ExecutorService pool = Executors.newFixedThreadPool(4);
|
||||
private CompletionService completionService = new ExecutorCompletionService(pool);
|
||||
    private HashMap<String, Annotation> pipelineAnnotationCache;
    private HashMap<String, Annotation> pipelineSentimentAnnotationCache;
    private HashMap<String, CoreDocument> coreDocumentAnnotationCache;
    private HashMap<String, Annotation> jmweAnnotationCache;

    private MaxentTagger tagger = new MaxentTagger();

    private GrammaticalStructureFactory gsf;
    private AbstractSequenceClassifier<CoreLabel> classifier;

    //SentimentAnalyzer caches, keyed by the cached input string.
    private HashMap<String, Integer> tokenizeCountingHashMap = new HashMap<>();
    private HashMap<String, List<List<TaggedWord>>> taggedWordListHashMap = new HashMap<>();
    private HashMap<String, ArrayList<String>> retrieveTGWListHashMap = new HashMap<>();
    private HashMap<String, List<CoreMap>> sentences1HashMap = new HashMap<>();
    private HashMap<String, List<CoreMap>> sentencesSentimentHashMap = new HashMap<>();
    private HashMap<String, ArrayList<Tree>> trees1HashMap = new HashMap<>();
    private HashMap<String, ArrayList<GrammaticalStructure>> grammaticalStructureHashMap = new HashMap<>();
    private HashMap<String, ArrayList<TypedDependency>> typedDependenciesHashMap = new HashMap<>();
    private HashMap<String, ArrayList<Integer>> rnnCoreAnnotationsPredictedHashMap = new HashMap<>();
    private HashMap<String, ArrayList<SimpleMatrix>> simpleMatricesHashMap = new HashMap<>();
    private HashMap<String, ArrayList<SimpleMatrix>> simpleMatricesNodevectorsHashMap = new HashMap<>();
    private HashMap<String, List> listHashMap = new HashMap<>();
    private HashMap<String, Integer> longestHashMap = new HashMap<>();
    private HashMap<String, Integer> sentimentHashMap = new HashMap<>();
    private HashMap<String, List<IMWE<IToken>>> imwesHashMap = new HashMap<>();
    private HashMap<String, Integer> InflectedCounterNegativeHashMap = new HashMap<>();
    private HashMap<String, Integer> InflectedCounterPositiveHashMap = new HashMap<>();
    private HashMap<String, ArrayList<String>> tokenEntryHashMap = new HashMap<>();
    private HashMap<String, Integer> MarkedContinuousCounterHashMap = new HashMap<>();
    private HashMap<String, Integer> UnmarkedPatternCounterHashMap = new HashMap<>();
    private HashMap<String, ArrayList<String>> strTokensIpartFormHashMap = new HashMap<>();
    private HashMap<String, ArrayList<String>> tokenFormsHashMap = new HashMap<>();
    private HashMap<String, ArrayList<String>> strTokenEntryGetPOSHashMap = new HashMap<>();
    private HashMap<String, ArrayList<Integer>> intTokenEntyCountsHashMap = new HashMap<>();
    private HashMap<String, ArrayList<String>> ITokenTagsHashMap = new HashMap<>();
    private HashMap<String, ArrayList<String>> strTokenStemsHashMap = new HashMap<>();
    private HashMap<String, Integer> AnotatorcounterHashMap = new HashMap<>();
    private HashMap<String, Integer> TokensCounterHashMap = new HashMap<>();
    private HashMap<String, ArrayList<String>> entityTokenTagsHashMap = new HashMap<>();
    private HashMap<String, ArrayList<String>> nerEntitiesHashMap = new HashMap<>();
    private HashMap<String, ArrayList<String>> nerEntitiesTypeHashMap = new HashMap<>();
    private HashMap<String, ArrayList<String>> stopWordTokenHashMap = new HashMap<>();
    private HashMap<String, ArrayList<String>> stopWordLemmaHashMap = new HashMap<>();
    private HashMap<String, Integer> PairCounterHashMap = new HashMap<>();

    private HashMap<String, ArrayList<String>> strResponses = new HashMap<>();

    private void loadDataFromJson() {
        // Try the development checkout first, then fall back to the deployment path.
        try {
            data = (JSONObject) parser.parse(new FileReader(
                    "/mnt/hdd/home/christian/content/sourcemod_plugins_and_extensions/addons/" +
                            "sourcemod/scripting/gogs/ArtificialAutism/dynamicScore.json"));
        } catch (IOException e) {
            try {
                String whoami = System.getProperty("user.name");
                data = (JSONObject) parser.parse(new FileReader(
                        "/home/" + whoami + "/autism_bot_number_crunching/dynamicScore.json"));
            } catch (IOException | ParseException ex) {
                throw new RuntimeException(ex);
            }
        } catch (ParseException e) {
            throw new RuntimeException(e);
        }
    }

    public Datahandler() {
        loadDataFromJson();
        jmweAnnotationCache = new HashMap<>();
        pipelineAnnotationCache = new HashMap<>();
        pipelineSentimentAnnotationCache = new HashMap<>();
        coreDocumentAnnotationCache = new HashMap<>();
        gsf = initiateGrammaticalStructureFactory();
        String nerModel = "edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz";
        classifier = CRFClassifier.getClassifierNoExceptions(nerModel);
    }

    private GrammaticalStructureFactory initiateGrammaticalStructureFactory() {
        // Alternative model, kept for reference:
        // "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz"
        String lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz";
        LexicalizedParser lp = LexicalizedParser.loadModel(lexParserEnglishPCFG, "-maxLength", "100");
        TreebankLanguagePack tlp = lp.getOp().langpack();
        return tlp.grammaticalStructureFactory();
    }

    public StanfordCoreNLP pipeLineSetUp() {
        Properties props = new Properties();
        String shiftReduceParserPath = "edu/stanford/nlp/models/srparser/englishSR.ser.gz";
        // Caseless NER alternatives, kept for reference:
        // "edu/stanford/nlp/models/ner/english.conll.4class.caseless.distsim.crf.ser.gz"
        // "edu/stanford/nlp/models/ner/english.muc.7class.caseless.distsim.crf.ser.gz"
        props.setProperty("annotators", "tokenize,ssplit,pos,lemma,ner,parse");
        props.setProperty("parse.model", shiftReduceParserPath);
        props.setProperty("parse.maxlen", "90");
        props.setProperty("parse.binaryTrees", "true");
        props.setProperty("threads", "1");
        props.setProperty("pos.maxlen", "90");
        props.setProperty("tokenize.maxlen", "90");
        props.setProperty("ssplit.maxlen", "90");
        props.setProperty("lemma.maxlen", "90");
        props.setProperty("ner.model", "edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz" +
                ",edu/stanford/nlp/models/ner/english.conll.4class.distsim.crf.ser.gz" +
                ",edu/stanford/nlp/models/ner/english.muc.7class.distsim.crf.ser.gz");
        props.setProperty("ner.combinationMode", "HIGH_RECALL");
        props.setProperty("regexner.ignorecase", "true");
        props.setProperty("ner.fine.regexner.ignorecase", "true");
        props.setProperty("tokenize.options", "untokenizable=firstKeep");
        return new StanfordCoreNLP(props);
    }

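    // Usage sketch (not part of the original code; the sample sentence is
    // hypothetical): a pipeline built by pipeLineSetUp() is driven by creating
    // an Annotation, running annotate(), and reading the per-sentence CoreMaps.
    private void pipeLineUsageExample() {
        StanfordCoreNLP pipeline = pipeLineSetUp();
        Annotation doc = new Annotation("The quick brown fox jumps over the lazy dog.");
        pipeline.annotate(doc);
        for (CoreMap sentence : doc.get(CoreAnnotations.SentencesAnnotation.class)) {
            System.out.println(sentence.toShorterString());
        }
    }
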
    public StanfordCoreNLP shiftReduceParserInitiate() {
        Properties propsSentiment = new Properties();
        // Alternative models, kept for reference:
        // "edu/stanford/nlp/models/lexparser/englishRNN.ser.gz"
        // "edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger"
        String lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz";
        String sentimentModel = "edu/stanford/nlp/models/sentiment/sentiment.ser.gz";
        String taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words-distsim.tagger";
        String customStopWordList = "start,starts,period,periods,a,an,and,are,as,at,be,but,by,for,if,in,into,is,it,no,not,of," +
                "on,or,such,that,the,their,then,there,these,they,this,to,was,will,with";
        propsSentiment.setProperty("parse.model", lexParserEnglishPCFG);
        propsSentiment.setProperty("sentiment.model", sentimentModel);
        propsSentiment.setProperty("parse.maxlen", "90");
        propsSentiment.setProperty("threads", "1");
        propsSentiment.setProperty("pos.maxlen", "90");
        propsSentiment.setProperty("tokenize.maxlen", "90");
        propsSentiment.setProperty("ssplit.maxlen", "90");
        // coref is omitted on purpose: too expensive memory-wise.
        propsSentiment.setProperty("annotators", "tokenize,ssplit,pos,parse,sentiment,lemma,stopword");
        propsSentiment.setProperty("customAnnotatorClass.stopword", "FunctionLayer.StopwordAnnotator");
        propsSentiment.setProperty(StopwordAnnotator.STOPWORDS_LIST, customStopWordList);
        propsSentiment.setProperty("tokenize.options", "untokenizable=firstKeep");
        tagger = new MaxentTagger(taggerPath);
        return new StanfordCoreNLP(propsSentiment);
    }

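    // Usage sketch (not part of the original code; the sample sentence is
    // hypothetical): the pipeline above attaches a sentiment class to each sentence.
    private void sentimentPipelineExample() {
        StanfordCoreNLP sentimentPipeline = shiftReduceParserInitiate();
        Annotation doc = new Annotation("I really enjoyed this.");
        sentimentPipeline.annotate(doc);
        for (CoreMap sentence : doc.get(CoreAnnotations.SentencesAnnotation.class)) {
            System.out.println(sentence.get(
                    edu.stanford.nlp.sentiment.SentimentCoreAnnotations.SentimentClass.class));
        }
    }
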
    public String trimString(String str) {
        String message = str.trim();
        // Cut a leading Discord mention ("<@id> ...") down to the message body.
        if (message.startsWith("<@")) {
            message = message.substring(message.indexOf("> ") + 2);
        }
        if (!message.isEmpty()) {
            message = message.replace("@", "");
            if (message.contains("<>")) {
                message = message.substring(message.indexOf(">"));
            }
            if (message.startsWith("[ *")) {
                message = message.substring(message.indexOf("]"));
            }
        }
        return message;
    }

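    // Usage sketch (hypothetical input; not part of the original code): a leading
    // mention is cut at "> " and every remaining "@" is stripped.
    private void trimStringExample() {
        // prints "hello everyone"
        System.out.println(trimString("<@123456789> hello @everyone"));
    }
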
    private void createStrAnnotation(String str, StanfordCoreNLP stanfordCoreNLP, Boolean sentimentBool) {
        Annotation strAnno2 = new Annotation(str);
        strAnno2.compact();
        try {
            stanfordCoreNLP.annotate(strAnno2);
            if (sentimentBool) {
                pipelineSentimentAnnotationCache.put(str, strAnno2);
            } else {
                pipelineAnnotationCache.put(str, strAnno2);
            }
        } catch (Exception e) {
            System.out.println("StanfordCoreNLP annotate failed: " + e.getMessage());
        }
    }

    private SentimentAnalyzerTestDynamicTesting getResponseFuturesHelper(
            String strF, String str1, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment,
            List<CoreMap> coreMaps1, Annotation strAnno, Annotation strAnnoSentiment, CoreDocument coreDocument,
            Integer tokenizeCountingF, List<List<TaggedWord>> taggedWordListF, ArrayList<TypedDependency> typedDependenciesF,
            ArrayList<Integer> rnnCoreAnnotationsPredictedF, ArrayList<SimpleMatrix> simpleMatricesF,
            ArrayList<SimpleMatrix> simpleMatricesNodevectorsF, List<String> listF, Integer longestF,
            List<CoreMap> sentencesF, List<CoreMap> sentencesSentimentF, ArrayList<Tree> treesF,
            ArrayList<GrammaticalStructure> grammaticalStructuresF, Integer sentimentLongestF, List<IMWE<IToken>> imwesF,
            Integer inflectedCounterNegativeF, Integer inflectedCounterPositiveF, ArrayList<String> tokenEntryF,
            Integer unmarkedPatternCounterF, ArrayList<String> strTokensIpartFormF, ArrayList<String> tokenFormsF,
            ArrayList<Integer> intTokenEntyCountsF, Integer markedContinuousCounterF, ArrayList<String> ITokenTagsF,
            ArrayList<String> strTokenEntryGetPOSF, ArrayList<String> retrieveTGWListF, Integer pairCounterF,
            Integer tokensCounterF, ArrayList<String> stopWordLemmaF, ArrayList<String> nerEntitiesF,
            ArrayList<String> stopWordTokenF, ArrayList<String> entityTokenTagsF, ArrayList<String> nerEntitiesTypeF,
            Integer anotatorcounterF, ArrayList<String> strTokenStemsF) {
        // Look up every cached artifact for str1; anything still null afterwards is
        // computed by SentimentAnalyzerTestDynamicTesting and written back below.
        Annotation annotation2 = pipelineSentimentAnnotationCache.getOrDefault(str1, null);
        Annotation annotation4 = pipelineAnnotationCache.getOrDefault(str1, null);
        CoreDocument coreDocument1 = coreDocumentAnnotationCache.getOrDefault(str1, null);
        Annotation jmweAnnotation = jmweAnnotationCache.getOrDefault(str1, null);
        if (annotation2 == null) {
            createStrAnnotation(str1, stanfordCoreNLPSentiment, true);
        }
        if (annotation4 == null) {
            createStrAnnotation(str1, stanfordCoreNLP, false);
        }
        if (coreDocument1 == null) {
            getCoreDocumentsSuggested(stanfordCoreNLP, str1);
        }
        if (jmweAnnotation == null) {
            getJMWEAnnotation(str1);
            jmweAnnotation = jmweAnnotationCache.get(str1);
        }
        Integer tokenizeCounting = tokenizeCountingHashMap.getOrDefault(str1, null);
        List<List<TaggedWord>> taggedWordList1 = taggedWordListHashMap.getOrDefault(str1, null);
        ArrayList<String> retrieveTGWList1 = retrieveTGWListHashMap.getOrDefault(str1, null);
        List<CoreMap> sentence1 = sentences1HashMap.getOrDefault(str1, null);
        List<CoreMap> sentenceSentiment1 = sentencesSentimentHashMap.getOrDefault(str1, null);
        ArrayList<Tree> trees1 = trees1HashMap.getOrDefault(str1, null);
        List<CoreMap> coreMaps2 = new ArrayList<>();
        ArrayList<GrammaticalStructure> grammaticalStructures1 = grammaticalStructureHashMap.getOrDefault(str1, null);
        if (jmweAnnotation != null) {
            coreMaps2 = jmweAnnotation.get(CoreAnnotations.SentencesAnnotation.class);
        }
        ArrayList<TypedDependency> typedDependencies1 = typedDependenciesHashMap.getOrDefault(str1, null);
        ArrayList<Integer> rnnCoreAnnotationsPredicted1 = rnnCoreAnnotationsPredictedHashMap.getOrDefault(str1, null);
        ArrayList<SimpleMatrix> simpleMatrices1 = simpleMatricesHashMap.getOrDefault(str1, null);
        ArrayList<SimpleMatrix> simpleMatricesNodevectors1 = simpleMatricesNodevectorsHashMap.getOrDefault(str1, null);
        List list1 = listHashMap.getOrDefault(str1, null);
        Integer longest1 = longestHashMap.getOrDefault(str1, null);
        Integer sentimentLongest1 = sentimentHashMap.getOrDefault(str1, null);
        List<IMWE<IToken>> imwes1 = imwesHashMap.getOrDefault(str1, null);
        Integer InflectedCounterNegative1 = InflectedCounterNegativeHashMap.getOrDefault(str1, null);
        Integer InflectedCounterPositive1 = InflectedCounterPositiveHashMap.getOrDefault(str1, null);
        ArrayList<String> tokenEntry1 = tokenEntryHashMap.getOrDefault(str1, null);
        Integer MarkedContinuousCounter1 = MarkedContinuousCounterHashMap.getOrDefault(str1, null);
        Integer UnmarkedPatternCounter1 = UnmarkedPatternCounterHashMap.getOrDefault(str1, null);
        ArrayList<String> strTokensIpartForm1 = strTokensIpartFormHashMap.getOrDefault(str1, null);
        ArrayList<String> tokenForms1 = tokenFormsHashMap.getOrDefault(str1, null);
        ArrayList<String> strTokenEntryGetPOS1 = strTokenEntryGetPOSHashMap.getOrDefault(str1, null);
        ArrayList<Integer> intTokenEntyCounts1 = intTokenEntyCountsHashMap.getOrDefault(str1, null);
        ArrayList<String> ITokenTags1 = ITokenTagsHashMap.getOrDefault(str1, null);
        ArrayList<String> strTokenStems1 = strTokenStemsHashMap.getOrDefault(str1, null);
        Integer Anotatorcounter1 = AnotatorcounterHashMap.getOrDefault(str1, null);
        Integer TokensCounter1 = TokensCounterHashMap.getOrDefault(str1, null);
        ArrayList<String> entityTokenTags1 = entityTokenTagsHashMap.getOrDefault(str1, null);
        ArrayList<String> nerEntities1 = nerEntitiesHashMap.getOrDefault(str1, null);
        ArrayList<String> nerEntitiesType1 = nerEntitiesTypeHashMap.getOrDefault(str1, null);
        ArrayList<String> stopWordToken1 = stopWordTokenHashMap.getOrDefault(str1, null);
        ArrayList<String> stopWordLemma1 = stopWordLemmaHashMap.getOrDefault(str1, null);
        Integer PairCounter1 = PairCounterHashMap.getOrDefault(str1, null);

        Annotation annotationStrPipeLine1 = pipelineAnnotationCache.get(str1);
        Annotation annotationStrPipeLineSentiment1 = pipelineSentimentAnnotationCache.get(str1);

        SentimentAnalyzerTestDynamicTesting SMX = new SentimentAnalyzerTestDynamicTesting(strF, str1,
                coreMaps1, coreMaps2, strAnno,
                // new Annotation(str) occasionally comes back null, so fall back to the sentiment annotation.
                annotationStrPipeLine1 == null ? annotationStrPipeLineSentiment1 : annotationStrPipeLine1,
                strAnnoSentiment,
                annotationStrPipeLineSentiment1, coreDocument, coreDocumentAnnotationCache.get(str1),
                tagger, gsf, classifier, tokenizeCounting, tokenizeCountingF,
                taggedWordListF, taggedWordList1, retrieveTGWListF, retrieveTGWList1,
                sentencesF, sentence1, sentencesSentimentF, sentenceSentiment1, treesF, trees1,
                grammaticalStructuresF, grammaticalStructures1, typedDependenciesF,
                typedDependencies1, rnnCoreAnnotationsPredictedF, rnnCoreAnnotationsPredicted1,
                simpleMatricesF, simpleMatrices1, simpleMatricesNodevectorsF, simpleMatricesNodevectors1,
                listF, list1, longestF, longest1, sentimentLongestF, sentimentLongest1, imwesF,
                imwes1, inflectedCounterNegativeF, InflectedCounterNegative1, inflectedCounterPositiveF,
                InflectedCounterPositive1, tokenEntryF, tokenEntry1, markedContinuousCounterF,
                MarkedContinuousCounter1, unmarkedPatternCounterF, UnmarkedPatternCounter1,
                strTokensIpartFormF, strTokensIpartForm1, tokenFormsF, tokenForms1,
                strTokenEntryGetPOSF, strTokenEntryGetPOS1, intTokenEntyCountsF,
                intTokenEntyCounts1, ITokenTagsF, ITokenTags1, strTokenStemsF, strTokenStems1,
                anotatorcounterF, Anotatorcounter1, tokensCounterF, TokensCounter1,
                entityTokenTagsF, entityTokenTags1, nerEntitiesF, nerEntities1, nerEntitiesTypeF,
                nerEntitiesType1, stopWordTokenF, stopWordToken1, stopWordLemmaF, stopWordLemma1,
                pairCounterF, PairCounter1, data, false
        );
        // Write every freshly computed artifact back into its cache.
        if (tokenizeCounting == null) {
            tokenizeCountingHashMap.put(str1, SMX.getTokenizeCounting());
        }
        if (taggedWordList1 == null) {
            taggedWordListHashMap.put(str1, SMX.getTaggedWordList1());
        }
        if (retrieveTGWList1 == null) {
            retrieveTGWListHashMap.put(str1, SMX.getRetrieveTGWList1());
        }
        if (sentence1 == null) {
            sentences1HashMap.put(str1, SMX.getSentences1());
        }
        if (sentenceSentiment1 == null) {
            sentencesSentimentHashMap.put(str1, SMX.getSentencesSentiment1());
        }
        if (trees1 == null) {
            trees1HashMap.put(str1, SMX.getTrees1());
        }
        if (grammaticalStructures1 == null) {
            grammaticalStructureHashMap.put(str1, SMX.getGrammaticalStructures1());
        }
        if (typedDependencies1 == null) {
            typedDependenciesHashMap.put(str1, SMX.getTypedDependencies1());
        }
        if (rnnCoreAnnotationsPredicted1 == null) {
            rnnCoreAnnotationsPredictedHashMap.put(str1, SMX.getRnnCoreAnnotationsPredicted1());
        }
        if (simpleMatrices1 == null) {
            simpleMatricesHashMap.put(str1, SMX.getSimpleMatrices1());
        }
        if (simpleMatricesNodevectors1 == null) {
            simpleMatricesNodevectorsHashMap.put(str1, SMX.getSimpleMatricesNodevectors1());
        }
        if (list1 == null) {
            listHashMap.put(str1, SMX.getList1());
        }
        if (longest1 == null) {
            longestHashMap.put(str1, SMX.getLongest1());
        }
        if (sentimentLongest1 == null) {
            sentimentHashMap.put(str1, SMX.getSentimentLongest1());
        }
        if (imwes1 == null) {
            imwesHashMap.put(str1, SMX.getImwes1());
        }
        if (InflectedCounterNegative1 == null) {
            InflectedCounterNegativeHashMap.put(str1, SMX.getInflectedCounterNegative1());
        }
        if (InflectedCounterPositive1 == null) {
            InflectedCounterPositiveHashMap.put(str1, SMX.getInflectedCounterPositive1());
        }
        if (tokenEntry1 == null) {
            tokenEntryHashMap.put(str1, SMX.getTokenEntry1());
        }
        if (MarkedContinuousCounter1 == null) {
            MarkedContinuousCounterHashMap.put(str1, SMX.getMarkedContinuousCounter1());
        }
        if (UnmarkedPatternCounter1 == null) {
            UnmarkedPatternCounterHashMap.put(str1, SMX.getUnmarkedPatternCounter1());
        }
        if (strTokensIpartForm1 == null) {
            strTokensIpartFormHashMap.put(str1, SMX.getStrTokensIpartForm1());
        }
        if (tokenForms1 == null) {
            tokenFormsHashMap.put(str1, SMX.getTokenForms1());
        }
        if (strTokenEntryGetPOS1 == null) {
            strTokenEntryGetPOSHashMap.put(str1, SMX.getStrTokenEntryGetPOS1());
        }
        if (intTokenEntyCounts1 == null) {
            intTokenEntyCountsHashMap.put(str1, SMX.getIntTokenEntyCounts1());
        }
        if (ITokenTags1 == null) {
            ITokenTagsHashMap.put(str1, SMX.getITokenTags1());
        }
        if (strTokenStems1 == null) {
            strTokenStemsHashMap.put(str1, SMX.getStrTokenStems1());
        }
        if (Anotatorcounter1 == null) {
            AnotatorcounterHashMap.put(str1, SMX.getAnotatorcounter1());
        }
        if (TokensCounter1 == null) {
            TokensCounterHashMap.put(str1, SMX.getTokensCounter1());
        }
        if (entityTokenTags1 == null) {
            entityTokenTagsHashMap.put(str1, SMX.getEntityTokenTags1());
        }
        if (nerEntities1 == null) {
            nerEntitiesHashMap.put(str1, SMX.getNerEntities1());
        }
        if (nerEntitiesType1 == null) {
            nerEntitiesTypeHashMap.put(str1, SMX.getNerEntitiesType1());
        }
        if (stopWordToken1 == null) {
            stopWordTokenHashMap.put(str1, SMX.getStopWordToken1());
        }
        if (stopWordLemma1 == null) {
            stopWordLemmaHashMap.put(str1, SMX.getStopWordLemma1());
        }
        if (PairCounter1 == null) {
            PairCounterHashMap.put(str1, SMX.getPairCounter1());
        }
        return SMX;
    }

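    // The helper above applies a cache-aside pattern per input string: look the
    // value up, compute it only on a miss, then store it. A minimal generic
    // sketch of that pattern (illustrative only; nothing in this class calls it):
    private static <K, V> V cacheAside(Map<K, V> cache, K key, java.util.function.Function<K, V> compute) {
        V value = cache.get(key);
        if (value == null) {
            value = compute.apply(key);
            cache.put(key, value);
        }
        return value;
    }
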
    private class get_res implements Callable<SentimentAnalyzerTestDynamicTesting> {
        private final String strF;
        private final String str1;
        private final StanfordCoreNLP stanfordCoreNLP;
        private final StanfordCoreNLP stanfordCoreNLPSentiment;
        private final List<CoreMap> coreMaps1;
        private final Annotation strAnno;
        private final Annotation strAnnoSentiment;
        private final CoreDocument coreDocument;
        private final Integer tokenizeCountingF;
        private final List<List<TaggedWord>> taggedWordListF;
        private final ArrayList<TypedDependency> typedDependenciesF;
        private final ArrayList<Integer> rnnCoreAnnotationsPredictedF;
        private final ArrayList<SimpleMatrix> simpleMatricesF;
        private final ArrayList<SimpleMatrix> simpleMatricesNodevectorsF;
        private final List<String> listF;
        private final Integer longestF;
        private final List<CoreMap> sentencesF;
        private final List<CoreMap> sentencesSentimentF;
        private final ArrayList<Tree> treesF;
        private final ArrayList<GrammaticalStructure> grammaticalStructuresF;
        private final Integer sentimentLongestF;
        private final List<IMWE<IToken>> imwesF;
        private final Integer inflectedCounterNegativeF;
        private final Integer inflectedCounterPositiveF;
        private final ArrayList<String> tokenEntryF;
        private final Integer unmarkedPatternCounterF;
        private final ArrayList<String> strTokensIpartFormF;
        private final ArrayList<String> tokenFormsF;
        private final ArrayList<Integer> intTokenEntyCountsF;
        private final Integer markedContinuousCounterF;
        private final ArrayList<String> iTokenTagsF;
        private final ArrayList<String> strTokenEntryGetPOSF;
        private final ArrayList<String> retrieveTGWListF;
        private final Integer pairCounterF;
        private final Integer tokensCounterF;
        private final ArrayList<String> stopWordLemmaF;
        private final ArrayList<String> nerEntitiesF;
        private final ArrayList<String> stopWordTokenF;
        private final ArrayList<String> entityTokenTagsF;
        private final ArrayList<String> nerEntitiesTypeF;
        private final Integer anotatorcounterF;
        private final ArrayList<String> strTokenStemsF;

        public get_res(String strF, String str1, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment,
                       List<CoreMap> coreMaps1, Annotation strAnno, Annotation strAnnoSentiment, CoreDocument coreDocument,
                       Integer tokenizeCountingF, List<List<TaggedWord>> taggedWordListF,
                       ArrayList<TypedDependency> typedDependenciesF, ArrayList<Integer> rnnCoreAnnotationsPredictedF,
                       ArrayList<SimpleMatrix> simpleMatricesF, ArrayList<SimpleMatrix> simpleMatricesNodevectorsF,
                       List<String> listF, Integer longestF, List<CoreMap> sentencesF, List<CoreMap> sentencesSentimentF,
                       ArrayList<Tree> treesF, ArrayList<GrammaticalStructure> grammaticalStructuresF,
                       Integer sentimentLongestF, List<IMWE<IToken>> imwesF, Integer inflectedCounterNegativeF,
                       Integer inflectedCounterPositiveF, ArrayList<String> tokenEntryF, Integer unmarkedPatternCounterF,
                       ArrayList<String> strTokensIpartFormF, ArrayList<String> tokenFormsF,
                       ArrayList<Integer> intTokenEntyCountsF, Integer markedContinuousCounterF,
                       ArrayList<String> iTokenTagsF, ArrayList<String> strTokenEntryGetPOSF,
                       ArrayList<String> retrieveTGWListF, Integer pairCounterF, Integer tokensCounterF,
                       ArrayList<String> stopWordLemmaF, ArrayList<String> nerEntitiesF, ArrayList<String> stopWordTokenF,
                       ArrayList<String> entityTokenTagsF, ArrayList<String> nerEntitiesTypeF, Integer anotatorcounterF,
                       ArrayList<String> strTokenStemsF) {
            this.strF = strF;
            this.str1 = str1;
            this.stanfordCoreNLP = stanfordCoreNLP;
            this.stanfordCoreNLPSentiment = stanfordCoreNLPSentiment;
            this.coreMaps1 = coreMaps1;
            this.strAnno = strAnno;
            this.strAnnoSentiment = strAnnoSentiment;
            this.coreDocument = coreDocument;
            this.tokenizeCountingF = tokenizeCountingF;
            this.taggedWordListF = taggedWordListF;
            this.typedDependenciesF = typedDependenciesF;
            this.rnnCoreAnnotationsPredictedF = rnnCoreAnnotationsPredictedF;
            this.simpleMatricesF = simpleMatricesF;
            this.simpleMatricesNodevectorsF = simpleMatricesNodevectorsF;
            this.listF = listF;
            this.longestF = longestF;
            this.sentencesF = sentencesF;
            this.sentencesSentimentF = sentencesSentimentF;
            this.treesF = treesF;
            this.grammaticalStructuresF = grammaticalStructuresF;
            this.sentimentLongestF = sentimentLongestF;
            this.imwesF = imwesF;
            this.inflectedCounterNegativeF = inflectedCounterNegativeF;
            this.inflectedCounterPositiveF = inflectedCounterPositiveF;
            this.tokenEntryF = tokenEntryF;
            this.unmarkedPatternCounterF = unmarkedPatternCounterF;
            this.strTokensIpartFormF = strTokensIpartFormF;
            this.tokenFormsF = tokenFormsF;
            this.intTokenEntyCountsF = intTokenEntyCountsF;
            this.markedContinuousCounterF = markedContinuousCounterF;
            this.iTokenTagsF = iTokenTagsF;
            this.strTokenEntryGetPOSF = strTokenEntryGetPOSF;
            this.retrieveTGWListF = retrieveTGWListF;
            this.pairCounterF = pairCounterF;
            this.tokensCounterF = tokensCounterF;
            this.stopWordLemmaF = stopWordLemmaF;
            this.nerEntitiesF = nerEntitiesF;
            this.stopWordTokenF = stopWordTokenF;
            this.entityTokenTagsF = entityTokenTagsF;
            this.nerEntitiesTypeF = nerEntitiesTypeF;
            this.anotatorcounterF = anotatorcounterF;
            this.strTokenStemsF = strTokenStemsF;
        }

        @Override
        public SentimentAnalyzerTestDynamicTesting call() throws Exception {
            return getResponseFuturesHelper(strF, str1, stanfordCoreNLP, stanfordCoreNLPSentiment,
                    coreMaps1, strAnno, strAnnoSentiment, coreDocument, tokenizeCountingF, taggedWordListF,
                    typedDependenciesF, rnnCoreAnnotationsPredictedF, simpleMatricesF, simpleMatricesNodevectorsF,
                    listF, longestF, sentencesF, sentencesSentimentF, treesF, grammaticalStructuresF, sentimentLongestF,
                    imwesF, inflectedCounterNegativeF, inflectedCounterPositiveF, tokenEntryF, unmarkedPatternCounterF,
                    strTokensIpartFormF, tokenFormsF, intTokenEntyCountsF, markedContinuousCounterF, iTokenTagsF,
                    strTokenEntryGetPOSF, retrieveTGWListF, pairCounterF, tokensCounterF, stopWordLemmaF, nerEntitiesF,
                    stopWordTokenF, entityTokenTagsF, nerEntitiesTypeF, anotatorcounterF, strTokenStemsF);
        }
    }

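    // Minimal sketch (illustrative, separate from the real pipeline) of the
    // submit-then-drain pattern getResponseFutures uses below: an
    // ExecutorCompletionService hands futures back in completion order, so one
    // slow comparison never blocks the already-finished ones.
    private static void completionOrderSketch() throws InterruptedException, ExecutionException {
        ExecutorService demoPool = Executors.newFixedThreadPool(2);
        CompletionService<Integer> demoService = new ExecutorCompletionService<>(demoPool);
        for (int i = 0; i < 4; i++) {
            final int n = i;
            demoService.submit(() -> n * n); // Callable<Integer>
        }
        for (int i = 0; i < 4; i++) {
            System.out.println(demoService.take().get()); // printed in completion order
        }
        demoPool.shutdown();
    }
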
    public String getResponseFutures(String strF, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) {
        strResponses.putIfAbsent(strF, new ArrayList<>());

        Annotation strAnno = new Annotation(strF);
        strAnno.compact();
        stanfordCoreNLP.annotate(strAnno);

        Annotation strAnnoSentiment = new Annotation(strF);
        strAnnoSentiment.compact();
        stanfordCoreNLPSentiment.annotate(strAnnoSentiment);

        Annotation annotation = new Annotation(strF);
        stanfordCoreNLP.annotate(annotation);
        CoreDocument coreDocument = new CoreDocument(annotation);
        Annotation jmweAnnotationF = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(strF);
        List<CoreMap> coreMaps1 = jmweAnnotationF.get(CoreAnnotations.SentencesAnnotation.class);

        // The strF-side artifacts start out null; the first completed comparison
        // fills them in so the remaining work can reuse them.
        Integer tokenizeCountingF = null;
        List<List<TaggedWord>> taggedWordListF = null;
        ArrayList<String> retrieveTGWListF = null;
        List<CoreMap> sentencesF = null;
        List<CoreMap> sentencesSentimentF = null;
        ArrayList<Tree> treesF = null;
        ArrayList<GrammaticalStructure> grammaticalStructuresF = null;
        ArrayList<TypedDependency> typedDependenciesF = null;
        ArrayList<Integer> rnnCoreAnnotationsPredictedF = null;
        ArrayList<SimpleMatrix> simpleMatricesF = null;
        ArrayList<SimpleMatrix> simpleMatricesNodevectorsF = null;
        List<String> listF = null;
        Integer longestF = null;
        Integer sentimentLongestF = null;
        List<IMWE<IToken>> imwesF = null;
        Integer InflectedCounterNegativeF = null;
        Integer InflectedCounterPositiveF = null;
        ArrayList<String> tokenEntryF = null;
        Integer MarkedContinuousCounterF = null;
        Integer UnmarkedPatternCounterF = null;
        ArrayList<String> strTokensIpartFormF = null;
        ArrayList<String> tokenFormsF = null;
        ArrayList<String> strTokenEntryGetPOSF = null;
        ArrayList<Integer> intTokenEntyCountsF = null;
        ArrayList<String> ITokenTagsF = null;
        ArrayList<String> strTokenStemsF = null;
        Integer AnotatorcounterF = null;
        Integer TokensCounterF = null;
        ArrayList<String> entityTokenTagsF = null;
        ArrayList<String> nerEntitiesF = null;
        ArrayList<String> nerEntitiesTypeF = null;
        ArrayList<String> stopWordTokenF = null;
        ArrayList<String> stopWordLemmaF = null;
        Integer PairCounterF = null;

        ArrayList<String> concurrentRelations = new ArrayList<>();
        StringBuilder SB = new StringBuilder();
        List<String> ues_copy = new ArrayList<>(DataMapper.getAllStrings());
        // Sentinel meaning "no score seen yet"; any real score replaces it.
        double preRelationUserCounters = -1.123456789;

        ArrayList<Future<SentimentAnalyzerTestDynamicTesting>> futures = new ArrayList<>();
        Properties prop = new Properties();
        String fileName = "app.config";
        try (FileInputStream fis = new FileInputStream(fileName)) {
            prop.load(fis);
        } catch (IOException ex) {
            // app.config is optional here; the defaults below cope with its absence.
        }
        // Submit one comparison task per stored string; results are drained below.
        for (String str1 : ues_copy) {
            if (!strF.equals(str1)) {
                Future<SentimentAnalyzerTestDynamicTesting> submit = completionService.submit(new get_res(strF, str1,
                        stanfordCoreNLP, stanfordCoreNLPSentiment,
                        coreMaps1, strAnno, strAnnoSentiment, coreDocument, tokenizeCountingF, taggedWordListF,
                        typedDependenciesF, rnnCoreAnnotationsPredictedF, simpleMatricesF, simpleMatricesNodevectorsF,
                        listF, longestF, sentencesF, sentencesSentimentF, treesF, grammaticalStructuresF, sentimentLongestF,
                        imwesF, InflectedCounterNegativeF, InflectedCounterPositiveF, tokenEntryF, UnmarkedPatternCounterF,
                        strTokensIpartFormF, tokenFormsF, intTokenEntyCountsF, MarkedContinuousCounterF, ITokenTagsF,
                        strTokenEntryGetPOSF, retrieveTGWListF, PairCounterF, TokensCounterF, stopWordLemmaF, nerEntitiesF,
                        stopWordTokenF, entityTokenTagsF, nerEntitiesTypeF, AnotatorcounterF, strTokenStemsF));
                futures.add(submit);
            }
        }

        int pending = futures.size();
        while (pending > 0) {
            try {
                Future<SentimentAnalyzerTestDynamicTesting> completed = completionService.poll(100, TimeUnit.MILLISECONDS);
                if (completed != null) {
                    --pending;
                    SentimentAnalyzerTestDynamicTesting SMX = completed.get();
                    if (SMX == null) continue;
                    double scoreRelationLastUserMsg = SMX.getScore();
                    if (scoreRelationLastUserMsg > preRelationUserCounters
                            || preRelationUserCounters == -1.123456789) { // sentinel: accept the first score unconditionally
                        preRelationUserCounters = scoreRelationLastUserMsg;
                        concurrentRelations.add(SMX.getSecondaryString());
                    }

                    // Fill the strF-side artifacts from the first result that carries
                    // them; this runs on the polling thread, so it stays sequential.
                    if (tokenizeCountingF == null) {
                        tokenizeCountingF = SMX.getTokenizeCountingF();
                    }
                    if (taggedWordListF == null) {
                        taggedWordListF = SMX.getTaggedWordListF();
                    }
                    if (typedDependenciesF == null) {
                        typedDependenciesF = SMX.getTypedDependenciesF();
                    }
                    if (rnnCoreAnnotationsPredictedF == null) {
                        rnnCoreAnnotationsPredictedF = SMX.getRnnCoreAnnotationsPredictedF();
                    }
                    if (simpleMatricesF == null) {
                        simpleMatricesF = SMX.getSimpleMatricesF();
                    }
                    if (simpleMatricesNodevectorsF == null) {
                        simpleMatricesNodevectorsF = SMX.getSimpleMatricesNodevectorsF();
                    }
                    if (listF == null) {
                        listF = SMX.getListF();
                    }
                    if (longestF == null) {
                        longestF = SMX.getLongestF();
                    }
                    if (sentencesF == null) {
                        sentencesF = SMX.getSentencesF();
                    }
                    if (sentencesSentimentF == null) {
                        sentencesSentimentF = SMX.getSentencesSentimentF();
                    }
                    if (treesF == null) {
                        treesF = SMX.getTreesF();
                    }
                    if (grammaticalStructuresF == null) {
                        grammaticalStructuresF = SMX.getGrammaticalStructuresF();
                    }
                    if (sentimentLongestF == null) {
                        sentimentLongestF = SMX.getSentimentLongestF();
                    }
                    if (imwesF == null) {
                        imwesF = SMX.getImwesF();
                    }
                    if (InflectedCounterNegativeF == null) {
                        InflectedCounterNegativeF = SMX.getInflectedCounterNegativeF();
                    }
                    if (InflectedCounterPositiveF == null) {
                        InflectedCounterPositiveF = SMX.getInflectedCounterPositiveF();
                    }
                    if (tokenEntryF == null) {
                        tokenEntryF = SMX.getTokenEntryF();
                    }
                    if (UnmarkedPatternCounterF == null) {
                        UnmarkedPatternCounterF = SMX.getUnmarkedPatternCounterF();
                    }
                    if (strTokensIpartFormF == null) {
                        strTokensIpartFormF = SMX.getStrTokensIpartFormF();
                    }
                    if (tokenFormsF == null) {
                        tokenFormsF = SMX.getTokenFormsF();
                    }
                    if (intTokenEntyCountsF == null) {
                        intTokenEntyCountsF = SMX.getIntTokenEntyCountsF();
                    }
                    if (MarkedContinuousCounterF == null) {
                        MarkedContinuousCounterF = SMX.getMarkedContinuousCounterF();
                    }
                    if (ITokenTagsF == null) {
                        ITokenTagsF = SMX.getITokenTagsF();
                    }
                    if (strTokenEntryGetPOSF == null) {
                        strTokenEntryGetPOSF = SMX.getStrTokenEntryGetPOSF();
                    }
                    if (retrieveTGWListF == null) {
                        retrieveTGWListF = SMX.getRetrieveTGWListF();
                    }
                    if (PairCounterF == null) {
                        PairCounterF = SMX.getPairCounterF();
                    }
                    if (TokensCounterF == null) {
                        TokensCounterF = SMX.getTokensCounterF();
                    }
                    if (stopWordLemmaF == null) {
                        stopWordLemmaF = SMX.getStopWordLemmaF();
                    }
                    if (nerEntitiesF == null) {
                        nerEntitiesF = SMX.getNerEntitiesF();
                    }
                    if (stopWordTokenF == null) {
                        stopWordTokenF = SMX.getStopWordTokenF();
                    }
                    if (entityTokenTagsF == null) {
                        entityTokenTagsF = SMX.getEntityTokenTagsF();
                    }
                    if (nerEntitiesTypeF == null) {
                        nerEntitiesTypeF = SMX.getNerEntitiesTypeF();
                    }
                    if (AnotatorcounterF == null) {
                        AnotatorcounterF = SMX.getAnotatorcounterF();
                    }
                    if (strTokenStemsF == null) {
                        strTokenStemsF = SMX.getStrTokenStemsF();
                    }
                }
            } catch (InterruptedException | ExecutionException e) {
                // Drop the remaining futures and rebuild the pool from app.config so
                // one bad task cannot wedge every later request.
                pending = 0;
                try (FileInputStream fis = new FileInputStream(fileName)) {
                    prop.load(fis);
                } catch (IOException ex) {
                    // fall through with whatever properties were loaded earlier
                }
                System.out.println(Arrays.toString(e.getStackTrace()));
                pool.shutdown();
                pool = Executors.newFixedThreadPool(Integer.valueOf(prop.getProperty("app.thread_count")));
                completionService = new ExecutorCompletionService<>(pool);
            }
        }

        int cacheRequirement = 8500;
        if (preRelationUserCounters > cacheRequirement && !ues_copy.contains(strF) && filterContent(strF)) {
            DataMapper.InsertMYSQLStrings(strF);
            DataMapper.checkStringsToDelete();
        }
        Collections.reverse(concurrentRelations);
        ArrayList<String> mysqlUpdateLastUsed = new ArrayList<>();
        Double aDouble = Double.valueOf(prop.getProperty("app.random_length"));
        if (!concurrentRelations.isEmpty()) {
            for (String secondaryRelation : concurrentRelations) {
                if (SB.toString().length() > strF.length() * aDouble && !SB.toString().isEmpty()) {
                    break;
                }

                ArrayList<String> orDefault = strResponses.getOrDefault(strF, null);
                boolean skip = false;
                for (String strItr : orDefault) {
                    if (secondaryRelation.equalsIgnoreCase(strItr)) {
                        skip = true;
                        // Reset the per-input history once it has grown close to the
                        // candidate pool, or randomly past five entries, so responses
                        // do not repeat forever but also do not cycle instantly.
                        if (orDefault.size() + 3 >= concurrentRelations.size()) {
                            orDefault = new ArrayList<>();
                            strResponses.put(strF, orDefault);
                        } else if (orDefault.size() > 5) {
                            double v = Math.random() * 10;
                            if (v > 5.6) {
                                orDefault = new ArrayList<>();
                                strResponses.put(strF, orDefault);
                            }
                        }
                        break;
                    }
                }
                if (skip) continue;
                /*
                // Disabled experiment: re-score the concatenation so far plus the next
                // candidate, and stop once adding more would lower the score.
                if (!SB.isEmpty()) {
                    String testSTR = SB.toString() + " " + secondaryRelation;
                    SentimentAnalyzerTestDynamicTesting SMX = getResponseFuturesHelper(strF, testSTR, stanfordCoreNLP, stanfordCoreNLPSentiment,
                            coreMaps1, strAnno, strAnnoSentiment, coreDocument, tokenizeCountingF, taggedWordListF,
                            typedDependenciesF, rnnCoreAnnotationsPredictedF, simpleMatricesF, simpleMatricesNodevectorsF,
                            listF, longestF, sentencesF, sentencesSentimentF, treesF, grammaticalStructuresF, sentimentLongestF,
                            imwesF, InflectedCounterNegativeF, InflectedCounterPositiveF, tokenEntryF, UnmarkedPatternCounterF,
                            strTokensIpartFormF, tokenFormsF, intTokenEntyCountsF, MarkedContinuousCounterF, ITokenTagsF,
                            strTokenEntryGetPOSF, retrieveTGWListF, PairCounterF, TokensCounterF, stopWordLemmaF, nerEntitiesF,
                            stopWordTokenF, entityTokenTagsF, nerEntitiesTypeF, AnotatorcounterF, strTokenStemsF);
                    double scoreRelationLastUserMsg = SMX.getScore();
                    if (preRelationUserCounters > scoreRelationLastUserMsg) {
                        break;
                    }
                }
                */

                SB.append(secondaryRelation).append(". ");
                mysqlUpdateLastUsed.add(secondaryRelation);
                orDefault.add(secondaryRelation);
                strResponses.put(strF, orDefault);
            }
        }
        if (SB.toString().isEmpty()) {
            return "failure, preventing stuckness";
        }
        DataMapper.updateLastUsed(mysqlUpdateLastUsed);
        return SB.toString();
    }

    private void getJMWEAnnotation(String str1) {
        Annotation jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(str1);
        jmweAnnotationCache.put(str1, jmweAnnotation);
    }

    public String getResponseMsg(String str, String personName, StanfordCoreNLP stanfordCoreNLP,
                                 StanfordCoreNLP stanfordCoreNLPSentiment, Boolean ingameResponse) {
        String strF = trimString(str);
        String responseFutures = getResponseFutures(strF, stanfordCoreNLP, stanfordCoreNLPSentiment);
        if (!ingameResponse) {
            responseFutures = checkPersonPresentInSentence(personName, responseFutures, strF, stanfordCoreNLP,
                    stanfordCoreNLPSentiment);
        }
        return responseFutures;
    }

    private String checkPersonPresentInSentence(String personName, String responseMsg, String userLastMessage,
                                                StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment) {
        try {
            CoreDocument pipelineCoreDcoument = new CoreDocument(responseMsg);
            CoreDocument pipelineCoreDcoumentLastMsg = new CoreDocument(userLastMessage);
            stanfordCoreNLP.annotate(pipelineCoreDcoument);
            stanfordCoreNLPSentiment.annotate(pipelineCoreDcoumentLastMsg);
            // Matches names containing ten or more digits, i.e. raw user IDs.
            String regex = "(.*?\\d){10,}";
            if (pipelineCoreDcoument.entityMentions() != null) {
                for (CoreEntityMention em : pipelineCoreDcoument.entityMentions()) {
                    String entityType = em.entityType();
                    if ("PERSON".equals(entityType)) {
                        String str = responseMsg;
                        String emText = em.text();
                        Pattern pattern = Pattern.compile(regex);
                        Matcher matcher = pattern.matcher(personName);
                        boolean isMatched = matcher.matches();
                        if (!emText.equals(personName) && !isMatched) {
                            if (pipelineCoreDcoumentLastMsg.entityMentions() != null) {
                                for (CoreEntityMention emLastMsg : pipelineCoreDcoumentLastMsg.entityMentions()) {
                                    // Skip mentions that are pure numbers (user IDs).
                                    if (!emText.equals(emLastMsg.text()) && !emLastMsg.text().trim().matches("\\d+")) {
                                        str = (responseMsg.substring(0, responseMsg.indexOf(emText)) + " "
                                                + emLastMsg.text() + " " + responseMsg.substring(responseMsg.indexOf(emText)));
                                    }
                                }
                            }
                            str += personName;
                            return str;
                        }
                    }
                }
            }
        } catch (Exception e) {
            System.out.println("checkPersonPresentInSentence failed: " + e.getMessage());
        }
        return responseMsg;
    }

    public boolean filterContent(String str) {
        if (!str.isEmpty() && str.length() > 3) {
            String str1Local = str.trim();
            if (str1Local.length() > 2 && !str1Local.startsWith("!")) {
                return true;
            }
        }
        return false;
    }

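    // Examples (hypothetical inputs, not part of the original code):
    //   filterContent("!ban someone") == false   (bot-command prefix)
    //   filterContent("ok")           == false   (too short to store)
    //   filterContent("hello there")  == true
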
    public void getCoreDocumentsSuggested(StanfordCoreNLP pipeline, String str) {
        Annotation annotation = new Annotation(str);
        pipeline.annotate(annotation);
        CoreDocument coreDocument = new CoreDocument(annotation);
        coreDocumentAnnotationCache.put(str, coreDocument);
    }
}
43
src/main/java/FunctionLayer/LevenshteinDistance.java
Normal file
@ -0,0 +1,43 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package FunctionLayer;

/**
 * Classic dynamic-programming edit distance between two character sequences.
 *
 * @author install1
 */
public class LevenshteinDistance {
    private final CharSequence lhs;
    private final CharSequence rhs;

    private static int minimum(int a, int b, int c) {
        return Math.min(Math.min(a, b), c);
    }

    public LevenshteinDistance(CharSequence lhs, CharSequence rhs) {
        this.lhs = lhs;
        this.rhs = rhs;
    }

    public double computeLevenshteinDistance() {
        // distance[i][j] = edit distance between the first i chars of lhs
        // and the first j chars of rhs.
        int[][] distance = new int[lhs.length() + 1][rhs.length() + 1];
        for (int i = 0; i <= lhs.length(); i++) {
            distance[i][0] = i;
        }
        for (int j = 1; j <= rhs.length(); j++) {
            distance[0][j] = j;
        }
        for (int i = 1; i <= lhs.length(); i++) {
            for (int j = 1; j <= rhs.length(); j++) {
                distance[i][j] = minimum(
                        distance[i - 1][j] + 1,
                        distance[i][j - 1] + 1,
                        distance[i - 1][j - 1] + ((lhs.charAt(i - 1) == rhs.charAt(j - 1)) ? 0 : 1));
            }
        }
        return distance[lhs.length()][rhs.length()];
    }
}
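A quick usage sketch for the class above (illustrative, not part of the commit):

    LevenshteinDistance ld = new LevenshteinDistance("kitten", "sitting");
    System.out.println(ld.computeLevenshteinDistance()); // prints 3.0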
157
src/main/java/FunctionLayer/PipelineJMWESingleton.java
Normal file
@ -0,0 +1,157 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package FunctionLayer;

import edu.mit.jmwe.data.IMWE;
import edu.mit.jmwe.data.IToken;
import edu.mit.jmwe.data.Token;
import edu.mit.jmwe.detect.CompositeDetector;
import edu.mit.jmwe.detect.Consecutive;
import edu.mit.jmwe.detect.Exhaustive;
import edu.mit.jmwe.detect.IMWEDetector;
import edu.mit.jmwe.detect.InflectionPattern;
import edu.mit.jmwe.detect.MoreFrequentAsMWE;
import edu.mit.jmwe.detect.ProperNouns;
import edu.mit.jmwe.index.IMWEIndex;
import edu.mit.jmwe.index.MWEIndex;
import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.ling.JMWEAnnotation;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.util.CoreMap;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

/**
 * Singleton wrapper around the jMWE multi-word-expression detector.
 *
 * @author install1
 */
public class PipelineJMWESingleton {

    // Not volatile on purpose: the instance is created once during startup,
    // before any other thread reads it, and volatile reads would cost time.
    public static PipelineJMWESingleton INSTANCE;
    private static StanfordCoreNLP localNLP = initializeJMWE();
    private static String underscoreSpaceReplacement;
    private static IMWEIndex index;
    private static IMWEDetector detector;

    private PipelineJMWESingleton() {
        String whoami = System.getProperty("user.name");
        // Deployment path first, then the local development checkout.
        String jmweIndexData = "/home/" + whoami + "/autism_bot_number_crunching/lib/mweindex_wordnet3.0_semcor1.6.data";
        String jmweIndexDataLocalTest = "/mnt/hdd/home/christian/content/sourcemod_plugins_and_extensions/addons/sourcemod/scripting/gogs/ArtificialAutism/lib/mweindex_wordnet3.0_semcor1.6.data";
        File indexFile = new File(jmweIndexData);
        index = new MWEIndex(indexFile);
        try {
            index.open();
        } catch (IOException e) {
            indexFile = new File(jmweIndexDataLocalTest);
            index = new MWEIndex(indexFile);
            try {
                index.open();
            } catch (IOException ex) {
                throw new RuntimeException(ex);
            }
        }
        detector = getDetector(index, "Exhaustive");
    }

    public static void getINSTANCE() {
        INSTANCE = new PipelineJMWESingleton();
    }

    public final Annotation getJMWEAnnotation(String str) {
        try {
            index.open();
        } catch (IOException e) {
            throw new RuntimeException("unable to open IMWEIndex index: " + e + "\n");
        }
        Annotation annoStr = new Annotation(str);
        localNLP.annotate(annoStr);
        Class<CoreAnnotations.SentencesAnnotation> sentencesAnnotationClass = CoreAnnotations.SentencesAnnotation.class;
        for (CoreMap sentence : annoStr.get(sentencesAnnotationClass)) {
            List<IMWE<IToken>> mwes = getjMWEInSentence(sentence, index, detector, false);
            sentence.set(JMWEAnnotation.class, mwes);
        }
        return annoStr;
    }

    public final static StanfordCoreNLP initializeJMWE() {
        Properties propsJMWE = new Properties();
        propsJMWE.setProperty("annotators", "tokenize,ssplit,pos,lemma");
        propsJMWE.setProperty("tokenize.options", "untokenizable=firstKeep");
        propsJMWE.setProperty("threads", "1");
        propsJMWE.setProperty("pos.maxlen", "90");
        propsJMWE.setProperty("tokenize.maxlen", "90");
        propsJMWE.setProperty("ssplit.maxlen", "90");
        propsJMWE.setProperty("lemma.maxlen", "90");
        underscoreSpaceReplacement = "-";
        localNLP = new StanfordCoreNLP(propsJMWE);
        System.out.println("finished JMWE constructor \n");
        return localNLP;
    }

    public IMWEDetector getDetector(IMWEIndex index, String detector) {
        IMWEDetector iMWEdetector = null;
        switch (detector) {
            case "Consecutive":
                iMWEdetector = new Consecutive(index);
                break;
            case "Exhaustive":
                iMWEdetector = new Exhaustive(index);
                break;
            case "ProperNouns":
                iMWEdetector = ProperNouns.getInstance();
                break;
            case "Complex":
                iMWEdetector = new CompositeDetector(ProperNouns.getInstance(),
                        new MoreFrequentAsMWE(new InflectionPattern(new Consecutive(index))));
                break;
            case "CompositeConsecutiveProperNouns":
                iMWEdetector = new CompositeDetector(new Consecutive(index), ProperNouns.getInstance());
                break;
            default:
                throw new IllegalArgumentException("Invalid detector argument " + detector
                        + ", only \"Consecutive\", \"Exhaustive\", \"ProperNouns\", \"Complex\" or \"CompositeConsecutiveProperNouns\" are supported.");
        }
        return iMWEdetector;
    }

    public List<IMWE<IToken>> getjMWEInSentence(CoreMap sentence, IMWEIndex index, IMWEDetector detector,
                                                boolean verbose) {
        List<IToken> tokens = getITokens(sentence.get(CoreAnnotations.TokensAnnotation.class));
        List<IMWE<IToken>> mwes = detector.detect(tokens);
        if (verbose) {
            for (IMWE<IToken> token : mwes) {
                System.out.println("IMWE<IToken>: " + token);
            }
        }
        return mwes;
    }

    public List<IToken> getITokens(List<CoreLabel> tokens) {
        return getITokens(tokens, underscoreSpaceReplacement);
    }

    public List<IToken> getITokens(List<CoreLabel> tokens, String underscoreSpaceReplacement) {
        List<IToken> sentence = new ArrayList<>();
        for (CoreLabel token : tokens) {
            // jMWE uses "_" internally, so underscores and spaces in the surface
            // form and lemma are replaced before the tokens are handed over.
            sentence.add(new Token(
                    token.originalText().replaceAll("_", underscoreSpaceReplacement).replaceAll(" ", underscoreSpaceReplacement),
                    token.get(CoreAnnotations.PartOfSpeechAnnotation.class),
                    token.lemma().replaceAll("_", underscoreSpaceReplacement).replaceAll(" ", underscoreSpaceReplacement)));
        }
        return sentence;
    }
}
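How the singleton is used elsewhere in this commit (sketch; the sample sentence is hypothetical):

    PipelineJMWESingleton.getINSTANCE();
    Annotation anno = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation("He gave up the fight.");
    for (CoreMap sentence : anno.get(CoreAnnotations.SentencesAnnotation.class)) {
        System.out.println(sentence.get(JMWEAnnotation.class)); // detected MWEs, e.g. give_up
    }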
32
src/main/java/FunctionLayer/SimilarityMatrix.java
Normal file
@ -0,0 +1,32 @@
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package FunctionLayer;

/**
 * Value holder for a pair of strings and their similarity score.
 *
 * @author install1
 */
public class SimilarityMatrix {

    private String PrimaryString;
    private String SecondaryString;
    private double distance;

    public SimilarityMatrix(String str1, String str2) {
        this.PrimaryString = str1;
        this.SecondaryString = str2;
    }

    public SimilarityMatrix(String str1, String str2, double result) {
        this.PrimaryString = str1;
        this.SecondaryString = str2;
        this.distance = result;
    }

    public final String getSecondaryString() {
        return SecondaryString;
    }
}
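Usage sketch (the score value is made up for illustration):

    SimilarityMatrix sm = new SimilarityMatrix("hello there", "hi there", 4200.0);
    System.out.println(sm.getSecondaryString()); // "hi there"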
File diff suppressed because it is too large
108
src/main/java/FunctionLayer/StopwordAnnotator.java
Normal file
@ -0,0 +1,108 @@
package FunctionLayer;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.Set;

import edu.stanford.nlp.ling.CoreAnnotation;
import edu.stanford.nlp.pipeline.Annotator;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.core.StopAnalyzer;

import edu.stanford.nlp.ling.*;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.util.Pair;
import edu.stanford.nlp.util.ArraySet;

/**
 * CoreNLP annotator that checks whether each incoming token is a stopword.
 *
 * @author John Conwell
 * @author Paul Landes
 */
public class StopwordAnnotator implements Annotator, CoreAnnotation<Pair<Boolean, Boolean>> {

    /**
     * Stopword annotator class name used in the annotators property.
     */
    public static final String ANNOTATOR_CLASS = "stopword";

    /**
     * Property key to specify the comma-delimited list of custom stopwords.
     */
    public static final String STOPWORDS_LIST = "stopword-list";

    /**
     * Property key to specify whether the stopword list is case insensitive.
     */
    public static final String IGNORE_STOPWORD_CASE = "ignore-stopword-case";

    private static Class<? extends Pair> boolPair = Pair.makePair(true, true).getClass();

    private Properties props;
    private CharArraySet stopwords;

    public StopwordAnnotator() {
        this(new Properties());
    }

    public StopwordAnnotator(String notUsed, Properties props) {
        this(props);
    }

    public StopwordAnnotator(Properties props) {
        this.props = props;
        if (this.props.containsKey(STOPWORDS_LIST)) {
            String stopwordList = props.getProperty(STOPWORDS_LIST);
            boolean ignoreCase = Boolean.parseBoolean(props.getProperty(IGNORE_STOPWORD_CASE, "false"));
            this.stopwords = getStopWordList(stopwordList, ignoreCase);
        } else {
            this.stopwords = (CharArraySet) StopAnalyzer.ENGLISH_STOP_WORDS_SET;
        }
    }

    @Override
    public void annotate(Annotation annotation) {
        if (stopwords != null && stopwords.size() > 0 && annotation.containsKey(CoreAnnotations.TokensAnnotation.class)) {
            List<CoreLabel> tokens = annotation.get(CoreAnnotations.TokensAnnotation.class);
            for (CoreLabel token : tokens) {
                // Pair.first: the surface form is a stopword; Pair.second: the lemma is.
                boolean isWordStopword = stopwords.contains(token.word().toLowerCase());
                boolean isLemmaStopword = stopwords.contains(token.lemma().toLowerCase());
                Pair<Boolean, Boolean> pair = Pair.makePair(isWordStopword, isLemmaStopword);
                token.set(StopwordAnnotator.class, pair);
            }
        }
    }

    @Override
    public Set<Class<? extends CoreAnnotation>> requirementsSatisfied() {
        return Collections.singleton(StopwordAnnotator.class);
    }

    @Override
    public Set<Class<? extends CoreAnnotation>> requires() {
        return Collections.unmodifiableSet(new ArraySet<>(Arrays.asList(
                CoreAnnotations.TextAnnotation.class,
                CoreAnnotations.TokensAnnotation.class,
                CoreAnnotations.LemmaAnnotation.class,
                CoreAnnotations.PartOfSpeechAnnotation.class
        )));
    }

    @Override
    @SuppressWarnings("unchecked")
    public Class<Pair<Boolean, Boolean>> getType() {
        return (Class<Pair<Boolean, Boolean>>) boolPair;
    }

    public static CharArraySet getStopWordList(String stopwordList, boolean ignoreCase) {
        String[] terms = stopwordList.split(",");
        CharArraySet stopwordSet = new CharArraySet(terms.length, ignoreCase);
        for (String term : terms) {
            stopwordSet.add(term);
        }
        return CharArraySet.unmodifiableSet(stopwordSet);
    }
}
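Registration sketch (mirrors the properties Datahandler sets in shiftReduceParserInitiate; the sample sentence is hypothetical):

    Properties props = new Properties();
    props.setProperty("annotators", "tokenize,ssplit,pos,lemma,stopword");
    props.setProperty("customAnnotatorClass.stopword", "FunctionLayer.StopwordAnnotator");
    props.setProperty(StopwordAnnotator.STOPWORDS_LIST, "the,a,of");
    StanfordCoreNLP pipeline = new StanfordCoreNLP(props);
    Annotation doc = new Annotation("The cat sat on a mat.");
    pipeline.annotate(doc);
    for (CoreLabel token : doc.get(CoreAnnotations.TokensAnnotation.class)) {
        Pair<Boolean, Boolean> stop = token.get(StopwordAnnotator.class);
        System.out.println(token.word() + " stopword=" + stop.first());
    }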
3
src/main/java/META-INF/MANIFEST.MF
Normal file
@ -0,0 +1,3 @@
Manifest-Version: 1.0
Main-Class: PresentationLayer.DiscordHandler

114
src/main/java/PresentationLayer/DiscordHandler.java
Normal file
@ -0,0 +1,114 @@
package PresentationLayer;

import DataLayer.RunnerClient;
import DataLayer.ThreadClient;
import FunctionLayer.Datahandler;
import FunctionLayer.PipelineJMWESingleton;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import net.dv8tion.jda.api.JDABuilder;
import net.dv8tion.jda.api.entities.Activity;
import net.dv8tion.jda.api.entities.Member;
import net.dv8tion.jda.api.events.message.MessageReceivedEvent;
import net.dv8tion.jda.api.hooks.ListenerAdapter;
import net.dv8tion.jda.api.requests.GatewayIntent;

import java.io.FileInputStream;
import java.io.IOException;
import java.util.List;
import java.util.Properties;

/**
 * Discord entry point: boots the NLP pipelines and forwards channel messages
 * to the Datahandler.
 *
 * @author install1
 */
public class DiscordHandler extends ListenerAdapter {
    private static StanfordCoreNLP stanfordCoreNLP;
    private static Datahandler datahandler;
    private static StanfordCoreNLP stanfordCoreNLPSentiment;

    public static void main(String[] args) {
        datahandler = new Datahandler();
        PipelineJMWESingleton.getINSTANCE();
        stanfordCoreNLP = datahandler.pipeLineSetUp();
        stanfordCoreNLPSentiment = datahandler.shiftReduceParserInitiate();

        System.out.println("FINISHED ALL ANNOTATIONS");
        // Warm-up call so the first real message does not pay the full cost.
        String strF = datahandler.trimString("abcdef");
        datahandler.getResponseFutures(strF, stanfordCoreNLP, stanfordCoreNLPSentiment);
        Properties prop = new Properties();
        String fileName = "app.config";
        try (FileInputStream fis = new FileInputStream(fileName)) {
            prop.load(fis);
        } catch (IOException ex) {
            // without app.config there is no token; the builder below will fail fast
        }
        String token = prop.getProperty("app.discordtoken");

        JDABuilder.createLight(token, GatewayIntent.GUILD_MESSAGES, GatewayIntent.DIRECT_MESSAGES)
                .addEventListeners(new DiscordHandler())
                .setActivity(Activity.playing("Being the autism bot"))
                .build();
        new ThreadClient(datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment);
    }

||||
|
||||
|
||||
@Override
|
||||
public void onMessageReceived(MessageReceivedEvent event) {
|
||||
String content = event.getMessage().getContentRaw();
|
||||
String username = event.getMessage().getAuthor().getName();
|
||||
List<Member> mentionedMembers = event.getMessage().getMentions().getMembers();
|
||||
//List<Member> mentionedMembers = event.getMessage().getMentionedMembers();
|
||||
for (Member member : mentionedMembers) {
|
||||
content = content.replace(member.getId(), "");
|
||||
}
|
||||
if (username != null && !event.getAuthor().isBot() && !content.isEmpty()
|
||||
&& event.getMessage().getCategory() != null) {
|
||||
String channelName = event.getMessage().getChannel().getName().toLowerCase();
|
||||
boolean channelpermissionsDenied = false;
|
||||
if (channelName.contains("suggestion-box")) {
|
||||
channelpermissionsDenied = true;
|
||||
}
|
||||
String categoryName = event.getMessage().getCategory().getName().toLowerCase();
|
||||
switch (categoryName) {
|
||||
case "public area":
|
||||
case "information area": {
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
channelpermissionsDenied = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!channelpermissionsDenied) {
|
||||
boolean mentionedBot = false;
|
||||
if (mentionedMembers != null) {
|
||||
for (Member member : mentionedMembers) {
|
||||
if (member.getEffectiveName().equals(event.getJDA().getSelfUser().getName())) {
|
||||
mentionedBot = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
final String contentF = content;
|
||||
try {
|
||||
new RunnerClient(contentF, mentionedBot, channelName, datahandler, stanfordCoreNLP, stanfordCoreNLPSentiment,
|
||||
event, username);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
System.exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
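
The channel/category gating in onMessageReceived is all inline. As a sketch (method name hypothetical, not part of this commit), the same whitelist rule can be pulled into a pure helper that is trivial to unit test:

// Hedged sketch, not part of the commit: the whitelist rule from onMessageReceived
// extracted into a pure function. "isChannelAllowed" is a hypothetical name.
static boolean isChannelAllowed(String channelName, String categoryName) {
    if (channelName.toLowerCase().contains("suggestion-box")) {
        return false;
    }
    switch (categoryName.toLowerCase()) {
        case "public area":
        case "information area":
            return true;
        default:
            return false;
    }
}

With this, isChannelAllowed("general", "Public Area") is true, while any channel whose name contains "suggestion-box", or any channel outside the two whitelisted categories, is rejected.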
560
src/test/java/TestJunit.java
Normal file
@ -0,0 +1,560 @@
import DataLayer.DataMapper;
import DataLayer.testClasses;
import FunctionLayer.Datahandler;
import FunctionLayer.PipelineJMWESingleton;
import FunctionLayer.StanfordParser.SentimentAnalyzerTestDynamicTesting;
import edu.mit.jmwe.data.IMWE;
import edu.mit.jmwe.data.IToken;
import edu.stanford.nlp.ie.AbstractSequenceClassifier;
import edu.stanford.nlp.ie.crf.CRFClassifier;
import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.ling.TaggedWord;
import edu.stanford.nlp.parser.lexparser.LexicalizedParser;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.CoreDocument;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.tagger.maxent.MaxentTagger;
import edu.stanford.nlp.trees.*;
import edu.stanford.nlp.util.CoreMap;
import org.ejml.simple.SimpleMatrix;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.junit.Assert;
//import org.junit.Test;
import org.junit.jupiter.api.Test;

import java.io.FileReader;
import java.io.IOException;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.*;

public class TestJunit {

    private String taggerPath = "edu/stanford/nlp/models/pos-tagger/english-left3words-distsim.tagger";
    private MaxentTagger tagger = new MaxentTagger(taggerPath);
    private GrammaticalStructureFactory gsf = initiateGrammaticalStructureFactory();

    private JSONParser parser = new JSONParser();
    private JSONObject dataFromJson = null;

    String nerModel = "edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz";
    AbstractSequenceClassifier<CoreLabel> classifier = CRFClassifier.
            getClassifierNoExceptions(nerModel);

    public GrammaticalStructureFactory initiateGrammaticalStructureFactory() {
        String lexParserEnglishPCFG = "edu/stanford/nlp/models/lexparser/englishPCFG.ser.gz";
        LexicalizedParser lp = LexicalizedParser.
                loadModel(lexParserEnglishPCFG, "-maxLength", "100");
        TreebankLanguagePack langpack = lp.getOp().langpack();
        return langpack.grammaticalStructureFactory();
    }
    public Double testCall(testClasses testClass, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment, JSONObject dataRandom,
                           String tableTestType, boolean testAll, int indexCounter) {
        String sent1 = testClass.getSentence1();
        String sent2 = testClass.getSentence2();

        Annotation jmweAnnotationF = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(sent1);
        Annotation jmweAnnotation = PipelineJMWESingleton.INSTANCE.getJMWEAnnotation(sent2);

        // All caches start out null; SentimentAnalyzerTestDynamicTesting fills them in
        // via validateStringCaches() before scoring.
        Integer tokenizeCountingF = null;
        List<List<TaggedWord>> taggedWordListF = null;
        List<List<TaggedWord>> taggedWordList1 = null;
        ArrayList<String> retrieveTGWListF = null;
        ArrayList<String> retrieveTGWList1 = null;
        List<CoreMap> sentencesF = null;
        List<CoreMap> sentence1 = null;
        List<CoreMap> sentencesSentimentF = null;
        List<CoreMap> sentenceSentiment1 = null;
        List<CoreMap> coreMaps1 = jmweAnnotationF.get(CoreAnnotations.SentencesAnnotation.class);
        ArrayList<Tree> treesF = null;
        ArrayList<Tree> trees1 = null;
        ArrayList<GrammaticalStructure> grammaticalStructuresF = null;
        ArrayList<GrammaticalStructure> grammaticalStructures1 = null;
        ArrayList<TypedDependency> typedDependenciesF = null;
        ArrayList<Integer> rnnCoreAnnotationsPredictedF = null;
        ArrayList<SimpleMatrix> simpleMatricesF = null;
        ArrayList<SimpleMatrix> simpleMatricesNodevectorsF = null;
        ArrayList<?> listF = null;
        Integer longestF = null;
        Integer sentimentLongestF = null;
        List<IMWE<IToken>> imwesF = null;
        Integer InflectedCounterNegativeF = null;
        Integer InflectedCounterPositiveF = null;
        ArrayList<String> tokenEntryF = null;
        Integer MarkedContinuousCounterF = null;
        Integer UnmarkedPatternCounterF = null;
        ArrayList<String> strTokensIpartFormF = null;
        ArrayList<String> tokenFormsF = null;
        ArrayList<String> strTokenEntryGetPOSF = null;
        ArrayList<Integer> intTokenEntyCountsF = null;
        ArrayList<String> ITokenTagsF = null;
        ArrayList<String> strTokenStemsF = null;
        Integer AnotatorcounterF = null;
        Integer TokensCounterF = null;
        ArrayList<String> entityTokenTagsF = null;
        ArrayList<String> nerEntitiesF = null;
        ArrayList<String> nerEntitiesTypeF = null;
        ArrayList<String> stopWordTokenF = null;
        ArrayList<String> stopWordLemmaF = null;
        Integer PairCounterF = null;

        ArrayList<TypedDependency> typedDependencies1 = null;
        ArrayList<Integer> rnnCoreAnnotationsPredicted1 = null;
        ArrayList<SimpleMatrix> simpleMatrices1 = null;
        ArrayList<SimpleMatrix> simpleMatricesNodevectors1 = null;
        List<?> list1 = null;
        Integer longest1 = null;
        Integer sentimentLongest1 = null;
        List<IMWE<IToken>> imwes1 = null;
        Integer InflectedCounterNegative1 = null;
        Integer InflectedCounterPositive1 = null;
        ArrayList<String> tokenEntry1 = null;
        Integer MarkedContinuousCounter1 = null;
        Integer UnmarkedPatternCounter1 = null;
        ArrayList<String> strTokensIpartForm1 = null;
        ArrayList<String> tokenForms1 = null;
        ArrayList<String> strTokenEntryGetPOS1 = null;
        ArrayList<Integer> intTokenEntyCounts1 = null;
        ArrayList<String> ITokenTags1 = null;
        ArrayList<String> strTokenStems1 = null;
        Integer Anotatorcounter1 = null;
        Integer TokensCounter1 = null;
        ArrayList<String> entityTokenTags1 = null;
        ArrayList<String> nerEntities1 = null;
        ArrayList<String> nerEntitiesType1 = null;
        ArrayList<String> stopWordToken1 = null;
        ArrayList<String> stopWordLemma1 = null;
        Integer PairCounter1 = null;
        List<CoreMap> coreMaps2 = jmweAnnotation.get(CoreAnnotations.SentencesAnnotation.class);

        Annotation strAnno = new Annotation(sent1);
        strAnno.compact();
        stanfordCoreNLP.annotate(strAnno);

        Annotation strAnno2 = new Annotation(sent2);
        strAnno2.compact();
        stanfordCoreNLP.annotate(strAnno2);

        Annotation strAnnoSentiment = new Annotation(sent1);
        strAnnoSentiment.compact();
        stanfordCoreNLPSentiment.annotate(strAnnoSentiment);

        Annotation strAnnoSentiment2 = new Annotation(sent2);
        strAnnoSentiment2.compact();
        stanfordCoreNLPSentiment.annotate(strAnnoSentiment2);

        Annotation annotation = new Annotation(sent1);
        stanfordCoreNLP.annotate(annotation);
        CoreDocument coreDocument = new CoreDocument(annotation);

        annotation = new Annotation(sent2);
        stanfordCoreNLP.annotate(annotation);
        CoreDocument coreDocument1 = new CoreDocument(annotation);

        Integer tokenizeCounting = null;

        SentimentAnalyzerTestDynamicTesting sentimentAnalyzerTest = new SentimentAnalyzerTestDynamicTesting(sent1, sent2,
                coreMaps1, coreMaps2, strAnno,
                strAnno2, strAnnoSentiment,
                strAnnoSentiment2, coreDocument,
                coreDocument1,
                tagger, gsf, classifier, tokenizeCounting, tokenizeCountingF,
                taggedWordListF, taggedWordList1, retrieveTGWListF, retrieveTGWList1,
                sentencesF, sentence1, sentencesSentimentF, sentenceSentiment1, treesF, trees1,
                grammaticalStructuresF, grammaticalStructures1, typedDependenciesF,
                typedDependencies1, rnnCoreAnnotationsPredictedF, rnnCoreAnnotationsPredicted1,
                simpleMatricesF, simpleMatrices1, simpleMatricesNodevectorsF, simpleMatricesNodevectors1,
                listF, list1, longestF, longest1, sentimentLongestF, sentimentLongest1, imwesF,
                imwes1, InflectedCounterNegativeF, InflectedCounterNegative1, InflectedCounterPositiveF,
                InflectedCounterPositive1, tokenEntryF, tokenEntry1, MarkedContinuousCounterF,
                MarkedContinuousCounter1, UnmarkedPatternCounterF, UnmarkedPatternCounter1,
                strTokensIpartFormF, strTokensIpartForm1, tokenFormsF, tokenForms1,
                strTokenEntryGetPOSF, strTokenEntryGetPOS1, intTokenEntyCountsF,
                intTokenEntyCounts1, ITokenTagsF, ITokenTags1, strTokenStemsF, strTokenStems1,
                AnotatorcounterF, Anotatorcounter1, TokensCounterF, TokensCounter1,
                entityTokenTagsF, entityTokenTags1, nerEntitiesF, nerEntities1, nerEntitiesTypeF,
                nerEntitiesType1, stopWordTokenF, stopWordToken1, stopWordLemmaF, stopWordLemma1,
                PairCounterF, PairCounter1, dataRandom, !testAll); // testAll == true -> testing-function flag is false

        if (testAll) {
            return sentimentAnalyzerTest.callSMX();
        }

        // Test the remaining functions one at a time: keep their parameters in small
        // individual tables, get each function to pass on its own, then check whether
        // the combined result can still pass afterwards.
        Double score = 0.0;
        // There should be no null caches here because validateStringCaches() was called.
        if (tableTestType.equals("tokensCounterScoring")) {
            score = sentimentAnalyzerTest.tokensCounterScoring(0.0, sentimentAnalyzerTest.getTokensCounterF(), sentimentAnalyzerTest.getTokensCounter1());
        } else if (tableTestType.equals("stopWordTokenLemmaScoring")) {
            score = sentimentAnalyzerTest.stopWordTokenLemmaScoring(0.0, sentimentAnalyzerTest.getStopWordTokenF(), sentimentAnalyzerTest.getStopWordToken1(),
                    sentimentAnalyzerTest.getStopWordLemmaF(), sentimentAnalyzerTest.getStopWordLemma1());
        } else if (tableTestType.equals("simpleRNNMaxtrixVectors")) {
            score = sentimentAnalyzerTest.simpleRNNMaxtrixVectors(score, sentimentAnalyzerTest.getSimpleMatricesNodevectorsF(), sentimentAnalyzerTest.getSimpleMatricesNodevectors1());
        }

        return score;
    }
    //@RetryingTest(500)
    @Test
    public void testScoring() {
        Set<JSONObject> randomDataSet = new HashSet<>();

        // Only used when testing a specific function; ignored when testing all.
        //String testCaseFunction = "tokensCounterScoring";
        //String testCaseFunction = "stopWordTokenLemmaScoring";
        String testCaseFunction = "simpleRNNMaxtrixVectors";
        boolean testAll = false; // set to false when testing a specific function instead of the full suite.
        if (!testAll) {
            Set<JSONObject> jsonObjects = DataMapper.pickHighestProgression(testCaseFunction);
            if (jsonObjects.isEmpty()) {
                // We start from the bottom with random data.
                LoadDataFromJson();
                randomDataSet = ModifyDataRandomly(testCaseFunction);
            } else {
                // We continue from the last highest iteration.
                randomDataSet = miscData(jsonObjects, testCaseFunction);
            }
            DataMapper.WriteDataToSpecificFunction(randomDataSet, testCaseFunction);
            randomDataSet = DataMapper.SelectRandomDataNotProcessed(testCaseFunction);
        } else {
            // Select one row from each table where passed_all_test_cases; comment out
            // @RetryingTest when doing this here. randomDataSet stays empty until then.
        }

        Datahandler datahandler = new Datahandler();
        PipelineJMWESingleton.getINSTANCE();
        StanfordCoreNLP stanfordCoreNLP = datahandler.pipeLineSetUp();
        StanfordCoreNLP stanfordCoreNLPSentiment = datahandler.shiftReduceParserInitiate();

        // Test cases for the full run and for the specific function under test.
        List<testClasses> testClassesListAll = DataMapper.GetAllTestsCases();
        List<testClasses> testClassesSpecificFunction = DataMapper.GetFunctionTestCases();

        boolean passedtests = false;
        int counter = 0;

        int max_index_counter_tests_passed = DataMapper.get_index_counter_tests_passed(testCaseFunction, 0);

        String comperator_for_score_for_failing_testcase = DataMapper.getMaxIndexComparator(max_index_counter_tests_passed);
        Double bestScore = DataMapper.getbestScoreFormax_index_counter_tests_passed(max_index_counter_tests_passed, testCaseFunction, comperator_for_score_for_failing_testcase);

        for (JSONObject dataRandom : randomDataSet) {
            if (counter % 100 == 0) {
                System.out.println("counter: " + counter + "/" + randomDataSet.size());
            }
            if (!testAll) {
                // Here we run the tests for an individual function.
                if (passedAllTests(dataRandom, testClassesSpecificFunction, stanfordCoreNLP, stanfordCoreNLPSentiment, testCaseFunction, testAll,
                        max_index_counter_tests_passed)) {
                    DataMapper.MarkSuccessfullFunctionData(testCaseFunction, (Integer) dataRandom.get("rowid"));
                    System.out.println("passed all.");
                    passedtests = true;
                    break;
                }
            } else {
                // Check whether the parameter set also passes all original test cases afterwards.
                if (passedAllTests(dataRandom, testClassesListAll, stanfordCoreNLP, stanfordCoreNLPSentiment, testCaseFunction, testAll,
                        max_index_counter_tests_passed)) {
                    passedtests = true;
                }
            }
            counter++;
        }
        if (!testAll) {
            DataMapper.UpdateProcessed(testCaseFunction, randomDataSet, max_index_counter_tests_passed, bestScore, comperator_for_score_for_failing_testcase);
            DataMapper.UpdateModifiedKeyForRowID(randomDataSet, testCaseFunction);
        }
        if (!passedtests) {
            Assert.fail("no parameter set passed all test cases");
        }
    }
    private boolean passedAllTests(JSONObject dataRandom, List<testClasses> testClassesList, StanfordCoreNLP stanfordCoreNLP, StanfordCoreNLP stanfordCoreNLPSentiment,
                                   String tableTestType, boolean testAll, int max_index_counter_tests_passed) {
        int indexCounter = 0;
        dataRandom.put("index_counter_tests_passed", indexCounter); // indicates how far a run got.
        for (testClasses testClass : testClassesList) {
            Double PerformTesting = testCall(testClass, stanfordCoreNLP, stanfordCoreNLPSentiment, dataRandom, tableTestType, testAll, indexCounter);
            if (PerformTesting == null || PerformTesting.isNaN()) {
                // Set to 0 to indicate that the run broke down entirely and is not usable.
                dataRandom.put("index_counter_tests_passed", 0);
                return false;
            }
            // Update the score when we hit the failing test case or are about to surpass it.
            if (indexCounter >= max_index_counter_tests_passed) {
                dataRandom.put("failed_testcase_score", PerformTesting);
            }
            testClassesList.get(indexCounter).setResultScore(PerformTesting);
            // The fitting-less comparison is only done when running the full test suite,
            // not when testing individual functions.
            if (testClass.isPerformTestingFittingLess()) {
                testClasses testClassPerformFittingBetter = testClassesList.get(indexCounter - 1);
                if (testClass.getComparator().equals("<")) {
                    // Passing this condition always requires the current score to be worse than
                    // the previous one, because only one of the sentences is replaced and it is
                    // meant to act inferior compared to the sentence it replaced.
                    if (PerformTesting.intValue() > testClassPerformFittingBetter.getResultScore()) {
                        return false;
                    }
                } else if (testClass.getComparator().equals(">")) {
                    // Passing this condition always requires the current score to be better than the previous one.
                    if (PerformTesting.intValue() < testClassPerformFittingBetter.getResultScore()) {
                        return false;
                    }
                }
            } else if (testClass.getComparator().equals("<")) {
                // If getScore is 1 and PerformTesting is 500, we fail.
                if (PerformTesting.intValue() > testClass.getScore()) {
                    return false;
                }
            } else if (testClass.getComparator().equals(">")) {
                // If getScore is -1 and PerformTesting is -500, we fail.
                if (PerformTesting.intValue() < testClass.getScore()) {
                    return false;
                }
            }
            // Some cases have no comparator ("no operation") since their result is only
            // needed for comparison against the next result; this only happens on the
            // full test run, not when testing individual functions.
            indexCounter++;
            dataRandom.put("index_counter_tests_passed", indexCounter); // indicates how far a run got.
        }
        return true;
    }
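
Distilled from the method above: a candidate passes a "<" case by scoring at or below the target and a ">" case by scoring at or above it, while cases without a comparator only feed the next comparison. A minimal sketch of that gate (method name hypothetical, not part of this commit):

// Hedged sketch: the comparator gate from passedAllTests in isolation.
static boolean passesGate(String comparator, int score, int target) {
    if (comparator.equals("<")) {
        return score <= target;
    }
    if (comparator.equals(">")) {
        return score >= target;
    }
    return true; // "no operation" cases only feed the next comparison.
}

For example, passesGate("<", 100, 120) is true, while passesGate(">", -500, -1) is false, matching the two commented failure examples above.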
    private void LoadDataFromJson() {
        try {
            dataFromJson = (JSONObject) parser.parse(
                    new FileReader("/mnt/hdd/home/christian/content/sourcemod_plugins_and_extensions/addons/sourcemod/scripting/gogs/ArtificialAutism/dynamicScore.json")); // path to the JSON file.
        } catch (IOException e) {
            // Fall back to the per-user location when the primary path is absent.
            try {
                String whoami = System.getProperty("user.name");
                dataFromJson = (JSONObject) parser.parse(
                        new FileReader("/home/" + whoami + "/autism_bot_number_crunching/dynamicScore.json")); // path to the JSON file.
            } catch (IOException | ParseException ex) {
                throw new RuntimeException(ex);
            }
        } catch (ParseException e) {
            throw new RuntimeException(e);
        }
    }
    private Set<JSONObject> ModifyDataRandomly(String testCaseFunction) {
        Set<JSONObject> randomDataSet = new HashSet<>();
        // Generate a single seed object from the JSON defaults.
        for (int i = 0; i < 1; i++) {
            JSONObject object = new JSONObject();
            for (Object key : dataFromJson.keySet()) {
                if (!key.toString().startsWith(testCaseFunction + "Double_param") &&
                        !key.toString().startsWith(testCaseFunction + "_param")) continue;
                Object intValue = dataFromJson.get(key);
                if (key.toString().startsWith("simpleRNNMaxtrixVectorsDouble") || key.toString().startsWith("simpleRNNMatrixCalculationsDouble")
                        || key.toString().startsWith("typeDependenciesGrammaticalRelationDouble") || key.toString().startsWith("iterateTreesDouble")) {
                    double doubleVal = (double) intValue;
                    Double RandomUpperBound = doubleVal * (new Random().nextInt(10) + 0.1);
                    double randomVal = RandomUpperBound * new Random().nextDouble();
                    object.put(key, randomVal + 0.1);
                } else {
                    // json-simple parses whole numbers as Long, so a conversion to int is needed here.
                    intValue = ((Long) intValue).intValue();
                    int RandomUpperBound = ((int) intValue + 1) * (new Random().nextInt(10) + 1);
                    int RandomLowerBound = (int) intValue / 2;
                    object.put(key, new Random().nextInt(RandomUpperBound - RandomLowerBound) + RandomLowerBound);
                }
            }
            randomDataSet.add(object);
        }

        Set<JSONObject> randomDataSetToReturn = miscData(randomDataSet, testCaseFunction);
        return randomDataSetToReturn;
    }
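
For reference, the filter above only keeps keys following the <function>_paramN and <function>Double_paramN naming convention. A hedged sketch of what such entries could look like (values made up; whole numbers come back from json-simple as Long, hence the conversion above):

// Hedged sketch, not part of the commit: the key convention expected in
// dynamicScore.json, with hypothetical values.
JSONObject dataFromJson = new JSONObject();
dataFromJson.put("stopWordTokenLemmaScoring_param1", 100L);      // integer parameter
dataFromJson.put("stopWordTokenLemmaScoring_param2", 7L);        // integer parameter
dataFromJson.put("simpleRNNMaxtrixVectorsDouble_param1", 0.75);  // double parameter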
    private JSONObject getCloneRandomShuffled(List<Object> keys, JSONObject clone) {
        int keyIndex = 0;
        // Determines how many random values may go into each iteration.
        int splitAmount = new Random().nextInt(2, clone.keySet().size() / 2);

        for (Object key : keys) {
            int negativeOrPositive = new Random().nextInt(3);
            if (key.toString().startsWith("simpleRNNMaxtrixVectorsDouble") || key.toString().startsWith("simpleRNNMatrixCalculationsDouble")
                    || key.toString().startsWith("typeDependenciesGrammaticalRelationDouble") || key.toString().startsWith("iterateTreesDouble")) {
                Double doubleValue1 = (Double) clone.get(key);
                DecimalFormat newFormat = new DecimalFormat("#.###");
                newFormat.setDecimalFormatSymbols(DecimalFormatSymbols.getInstance(Locale.ENGLISH));
                if (negativeOrPositive == 0) { // values below minus 1 million would be silly
                    double newValue1 = doubleValue1 - (doubleValue1 * new Random().nextDouble(0.0, 25));
                    if (newValue1 > -1000000) {
                        clone.put(key, Double.valueOf(newFormat.format(newValue1)));
                    }
                } else if (negativeOrPositive == 1) { // values over 1 million would be silly
                    double newValue1 = doubleValue1 + (doubleValue1 * new Random().nextDouble(0.0, 25));
                    if (newValue1 < 1000000) {
                        clone.put(key, Double.valueOf(newFormat.format(newValue1)));
                    }
                } else {
                    // We leave the value alone apart from truncating it to three decimal places.
                    double newValue1 = Double.valueOf(newFormat.format(doubleValue1));
                    clone.put(key, newValue1);
                }
            } else {
                int intValue1 = (int) clone.get(key);
                if (negativeOrPositive == 0) { // values below minus 1 million would be silly
                    int i = intValue1 - (int) (intValue1 * new Random().nextDouble(0.0, 25));
                    if (i > -1000000) {
                        clone.put(key, i);
                    }
                } else if (negativeOrPositive == 1) { // values over 1 million would be silly
                    int i = intValue1 + (int) (intValue1 * new Random().nextDouble(0.0, 25));
                    if (i < 1000000) {
                        clone.put(key, i);
                    }
                } else {
                    // We don't change anything about the key.
                    clone.put(key, intValue1);
                }
            }
            keyIndex++;
            if (keyIndex % splitAmount == 0 && keyIndex > 0) {
                break;
            }
        }
        return clone;
    }
    private Set<JSONObject> miscData(Set<JSONObject> randomDataSet, String testCaseFunction) {
        Set<JSONObject> randomDataSetToReturn = new HashSet<>();

        int max_index_counter_tests_passed = DataMapper.get_index_counter_tests_passed(testCaseFunction, 0);
        boolean maxIndexCounterTestsPassedCount = DataMapper.getMaxIndex_counter_tests_passedCount(max_index_counter_tests_passed, testCaseFunction);

        HashMap<Integer, ArrayList<Integer>> arr = DataMapper.get_parameter_generations(randomDataSet, testCaseFunction);

        for (int i = 0; i < 2; i++) {
            for (JSONObject data : randomDataSet) {
                List<Object> keys = new ArrayList<>(data.keySet());
                Collections.shuffle(keys);
                // First approach: if there are fewer than 1000 rows for the particular
                // index_counter_tests_passed, we just randomly generate some data.
                if (maxIndexCounterTestsPassedCount) {
                    randomDataSetToReturn.add(getCloneRandomShuffled(keys, (JSONObject) data.clone()));
                } else {
                    // Otherwise nudge one parameter up and down by 10% each.
                    JSONObject rowid = compareDataTo(keys, (JSONObject) data.clone(), arr.get(data.get("rowid")), testCaseFunction, true);
                    JSONObject rowid_negative = compareDataTo(keys, (JSONObject) data.clone(), arr.get(data.get("rowid")), testCaseFunction, false);
                    if (rowid != null) {
                        randomDataSetToReturn.add(rowid);
                        randomDataSetToReturn.add(rowid_negative);
                    }
                }
            }
        }
        System.out.println("randomDataSetToReturn size: " + randomDataSetToReturn.size());
        return randomDataSetToReturn;
    }
    private JSONObject compareDataTo(List<Object> keys, JSONObject clone, ArrayList<Integer> arr, String testCaseFunction, boolean isPlussed) {
        int keyIndex = 0;
        int splitAmount = 1;

        boolean modifiedKey = false;
        for (Object key : keys) {
            // Skip parameters that were already modified in an earlier generation.
            boolean alreadyDone = false;
            for (int i : arr) {
                if (key.toString().endsWith("param" + i)) {
                    alreadyDone = true;
                    break;
                }
            }
            if (alreadyDone) {
                continue;
            }
            modifiedKey = true;

            if (key.toString().startsWith("simpleRNNMaxtrixVectorsDouble") || key.toString().startsWith("simpleRNNMatrixCalculationsDouble")
                    || key.toString().startsWith("typeDependenciesGrammaticalRelationDouble") || key.toString().startsWith("iterateTreesDouble")) {
                Double doubleValue1 = (Double) clone.get(key);
                DecimalFormat newFormat = new DecimalFormat("#.###");
                newFormat.setDecimalFormatSymbols(DecimalFormatSymbols.getInstance(Locale.ENGLISH));
                Double v = Double.valueOf(newFormat.format(doubleValue1));
                if (isPlussed) {
                    clone.put(key, v + (v * 0.10));
                } else {
                    clone.put(key, v - (v * 0.10));
                }
            } else {
                int intValue1 = (int) clone.get(key);
                if (isPlussed) {
                    clone.put(key, (int) (intValue1 + (intValue1 * 0.10)));
                } else {
                    clone.put(key, (int) (intValue1 - (intValue1 * 0.10)));
                }
            }
            clone.put("modified_key", key);
            keyIndex++; // maybe remove keyIndex and splitAmount again.
            if (keyIndex % splitAmount == 0) {
                break;
            }
        }
        if (!modifiedKey) {
            // Every parameter has been tried already; this row is exhausted.
            DataMapper.deleteRow((int) clone.get("rowid"), testCaseFunction);
            return null;
        }
        return clone;
    }
}
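
Taken together, miscData and compareDataTo implement a one-parameter-at-a-time hill climb: each generation clones a surviving row and nudges a single not-yet-tried parameter up and down by ten percent. A standalone sketch of that mutation step (class and method names hypothetical, not part of this commit):

// Hedged sketch: the ±10% single-parameter mutation from compareDataTo in isolation.
import java.util.HashMap;
import java.util.Map;

public class MutationSketch {
    static Map<String, Number> mutateOneParam(Map<String, Number> best, String param, boolean up) {
        Map<String, Number> candidate = new HashMap<>(best);
        double factor = up ? 1.10 : 0.90;
        Number v = candidate.get(param);
        if (v instanceof Double) {
            candidate.put(param, ((Double) v) * factor);
        } else {
            candidate.put(param, (int) (v.intValue() * factor));
        }
        return candidate;
    }
}

Both candidates are then scored through passedAllTests, and only rows that progress further through the ordered test cases are written back for the next generation.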