diff --git a/UD_any/morpho/machine.rm b/UD_any/morpho/machine.rm
index ede473e773c00d5067b3888dfa6096ffc4555eff..5c94b8085684b9dfa5d3e400e5d11e77cd501d93 100644
--- a/UD_any/morpho/machine.rm
+++ b/UD_any/morpho/machine.rm
@@ -1,5 +1,5 @@
 Name : Feats Machine
-Classifier : feats CNN(2,2,0,{FORM,UPOS},{-1,0,1},{},{FORM,FEATS},{10,8}) data/morpho_parts.ts
+Classifier : feats CNN(2,2,0,{FORM,UPOS},{-1,0,1},{},{FORM,FEATS},{10,8}) {data/morpho_parts.ts}
 Predictions : FEATS
 Strategy : sequential
 	morpho morpho NOTHING 1
diff --git a/UD_any/tagger/machine.rm b/UD_any/tagger/machine.rm
index 5e733d26f99fdecb384e8e9b702640dbaa0e3fee..dd17940e55c6d49964403592eb79185f5f648886 100644
--- a/UD_any/tagger/machine.rm
+++ b/UD_any/tagger/machine.rm
@@ -1,5 +1,5 @@
 Name : Tagger Machine
-Classifier : tagger CNN(-1,{-2,-1,0,1,2},{},{FORM,UPOS},{-1,0,1},{},{ID,FORM},{1,10},-1,-1) data/tagger.ts
+Classifier : tagger CNN(-1,{-2,-1,0,1,2},{},{FORM,UPOS},{-1,0,1},{},{ID,FORM},{1,10},-1,-1) {data/tagger.ts}
 Predictions : UPOS
 Strategy : sequential
 	tagger tagger 1
diff --git a/UD_any/tokenizer/machine.rm b/UD_any/tokenizer/machine.rm
index 5ff7eb25676c4a17bebc430e926b743e638835e1..91c03318486f114be3ee81f1d31ede40689b9847 100644
--- a/UD_any/tokenizer/machine.rm
+++ b/UD_any/tokenizer/machine.rm
@@ -1,5 +1,5 @@
 Name : Tokenizer Machine
-Classifier : tokenizer LSTM(-1,{-3,-2,-1},{},{FORM},{-1,0},{},{ID,FORM},{1,10},5,5) data/tokenizer.ts
+Classifier : tokenizer LSTM(-1,{-3,-2,-1},{},{FORM},{-1,0},{},{ID,FORM},{1,10},5,5) {data/tokenizer.ts}
 Splitwords : data/splitwords.ts
 Predictions : ID FORM EOS
 Strategy : sequential
diff --git a/UD_any/toketagger_incr/machine.rm b/UD_any/toketagger_incr/machine.rm
new file mode 100644
index 0000000000000000000000000000000000000000..eedf496f11763fda6400d3d92c2eff7dcf6b7301
--- /dev/null
+++ b/UD_any/toketagger_incr/machine.rm
@@ -0,0 +1,9 @@
+Name : Tokenizer and Tagger Machine
+Classifier : toketagger LSTM(-1,{-3,-2,-1},{},{FORM,UPOS},{-1,0},{},{ID,FORM},{1,10},5,5) {data/tokenizer.ts data/tagger.ts}
+Splitwords : data/splitwords.ts
+Predictions : ID FORM UPOS EOS
+Strategy : incremental
+	tokenizer tagger ENDWORD 0
+	tokenizer tagger SPLIT 0
+	tokenizer tokenizer 0
+	tagger tokenizer 1
diff --git a/UD_any/toketagger_seq/machine.rm b/UD_any/toketagger_seq/machine.rm
new file mode 100644
index 0000000000000000000000000000000000000000..ec0efc8b7988fdc4962f9cad7336298a03dd375c
--- /dev/null
+++ b/UD_any/toketagger_seq/machine.rm
@@ -0,0 +1,9 @@
+Name : Tokenizer and Tagger Machine
+Classifier : toketagger LSTM(-1,{-3,-2,-1},{},{FORM,UPOS},{-1,0},{},{ID,FORM},{1,10},5,5) {data/tokenizer.ts data/tagger.ts}
+Splitwords : data/splitwords.ts
+Predictions : ID FORM UPOS EOS
+Strategy : sequential
+	tokenizer tagger ENDWORD 1
+	tokenizer tagger SPLIT 1
+	tokenizer tagger 0
+	tagger tokenizer 1