From df79cf1d2311fdd2da7900c84b4e018c5919c1ed Mon Sep 17 00:00:00 2001
From: Franck Dary <franck.dary@lis-lab.fr>
Date: Wed, 13 Apr 2022 16:51:55 +0200
Subject: [PATCH] updated template machines

---
 UD_any/templates/tokeparser_incr/machine.rm | 20 ++++++++++---------
 UD_any/templates/tokeparser_seq/machine.rm  | 22 +++++++++++----------
 2 files changed, 23 insertions(+), 19 deletions(-)

diff --git a/UD_any/templates/tokeparser_incr/machine.rm b/UD_any/templates/tokeparser_incr/machine.rm
index e9e0a75..ad72b25 100644
--- a/UD_any/templates/tokeparser_incr/machine.rm
+++ b/UD_any/templates/tokeparser_incr/machine.rm
@@ -1,18 +1,21 @@
 Name : Tokenizer, Tagger, Morpho, Lemmatizer, Parser and Segmenter Machine
 Classifier : tokeparser
 {
-	Transitions : {tokenizer,data/tokenizer.ts tagger,data/tagger.ts morpho,data/morpho_whole.ts lemmatizer_rules,data/lemmatizer_rules.ts lemmatizer_case,data/lemmatizer_case.ts parser,data/parser_eager_rel_strict.ts segmenter,data/segmenter.ts}
+	Transitions : {tokenizer,data/tokenizer.ts tagger,data/tagger.ts morpho,data/morpho_whole.ts lemmatizer_rules,data/lemmatizer_rules.ts parser,data/parser_eager_rel_strict.ts segmenter,data/segmenter.ts}
 	LossMultiplier : {}
 	Network type : Modular
-  Contextual : Window{-10 0} Columns{FORM} LSTM{1 1 0 1} In{64} Out{128} w2v{FORM,data/FORM.w2v} Targets{b.-3 b.-2 b.-1 b.0 s.0 s.1 s.2 b.0.0 s.0.0 s.0.-1 s.1.0 s.1.-1 s.2.0 s.2.-1}
-  Context : Targets{b.-3 b.-2 b.-1 b.0 s.0 s.1 s.2 b.0.0 s.0.0 s.0.-1 s.1.0 s.1.-1 s.2.0 s.2.-1} Columns{EOS ID UPOS FEATS DEPREL} LSTM{1 1 0 1} In{64} Out{64} w2v{}
+  Context : Targets{b.-3 b.-2 b.-1 b.0 s.0 s.1 s.2 b.0.0 s.0.0 s.0.-1 s.1.0 s.1.-1 s.2.0 s.2.-1} Columns{FORM} Concat{1 1 0.0 1} In{300} Out{64} w2v{FORM,data/W2V/fasttextcleanfiltered.w2v}
+  Context : Targets{b.-3 b.-2 b.-1 b.0 s.0 s.1 s.2 b.0.0 s.0.0 s.0.-1 s.1.0 s.1.-1 s.2.0 s.2.-1} Columns{EOS ID UPOS FEATS DEPREL} LSTM{1 1 0 1} In{128} Out{64} w2v{}
   Focused : Column{prefix3:FORM} NbElem{3} Buffer{0} Stack{} LSTM{1 1 0 1} In{64} Out{64} w2v{}
   Focused : Column{suffix3:FORM} NbElem{3} Buffer{0} Stack{} LSTM{1 1 0 1} In{64} Out{64} w2v{}
 	RawInput : Left{5} Right{10} LSTM{1 1 0.0 1} In{32} Out{32}
-  History : NbElem{10} LSTM{1 1 0 1} In{32} Out{32}
-	SplitTrans : LSTM{1 1 0.0 1} In{64} Out{64}
-	InputDropout : 0.3
-	MLP : {1600 0.3 1600 0.3}
+  History : NbElem{10} LSTM{1 1 0 1} In{128} Out{64}
+  HistoryMine : NbElem{4} LSTM{1 1 0 1} In{128} Out{64}
+  StateName : Out{64}
+  Distance : FromBuffer{} FromStack{0 1 2} ToBuffer{0} ToStack{} Threshold{15} LSTM{1 1 0.0 1} In{128} Out{64}
+	SplitTrans : LSTM{1 1 0.0 1} In{128} Out{64}
+	InputDropout : 0.5
+	MLP : {3200 0.4 1600 0.4}
 	End
   Optimizer : Adagrad {0.01 0.000001 0 0.0000000001}
   Type : classification
@@ -28,8 +31,7 @@ Strategy
 	tokenizer tokenizer * 0
 	tagger morpho * 0
 	morpho lemmatizer_rules * 0
-  lemmatizer_rules lemmatizer_case * 0
-  lemmatizer_case parser * 0
+  lemmatizer_rules parser * 0
 	parser segmenter eager_SHIFT 0
 	parser segmenter eager_RIGHT_rel 0
 	parser parser * 0
diff --git a/UD_any/templates/tokeparser_seq/machine.rm b/UD_any/templates/tokeparser_seq/machine.rm
index 93d9bec..f63c06a 100644
--- a/UD_any/templates/tokeparser_seq/machine.rm
+++ b/UD_any/templates/tokeparser_seq/machine.rm
@@ -1,18 +1,21 @@
 Name : Tokenizer, Tagger, Morpho, Lemmatizer, Parser and Segmenter Machine
 Classifier : tokeparser
 {
-  Transitions : {tokenizer,data/tokenizer.ts tagger,data/tagger.ts morpho,data/morpho_whole.ts lemmatizer_rules,data/lemmatizer_rules.ts lemmatizer_case,data/lemmatizer_case.ts parser,data/parser_eager_rel_strict.ts segmenter,data/segmenter.ts}
-  LossMultiplier : {}
+	Transitions : {tokenizer,data/tokenizer.ts tagger,data/tagger.ts morpho,data/morpho_whole.ts lemmatizer_rules,data/lemmatizer_rules.ts parser,data/parser_eager_rel_strict.ts segmenter,data/segmenter.ts}
+	LossMultiplier : {}
 	Network type : Modular
-  Contextual : Window{-10 10} Columns{FORM} LSTM{1 1 0 1} In{64} Out{128} w2v{FORM,data/FORM.w2v} Targets{b.-3 b.-2 b.-1 b.0 b.1 b.2 s.0 s.1 s.2 b.0.0 s.0.0 s.0.-1 s.1.0 s.1.-1 s.2.0 s.2.-1}
-  Context : Targets{b.-3 b.-2 b.-1 b.0 b.1 b.2 s.0 s.1 s.2 b.0.0 s.0.0 s.0.-1 s.1.0 s.1.-1 s.2.0 s.2.-1} Columns{EOS ID UPOS FEATS DEPREL} LSTM{1 1 0 1} In{64} Out{64} w2v{}
+  Context : Targets{b.-3 b.-2 b.-1 b.0 b.1 b.2 s.0 s.1 s.2 b.0.0 s.0.0 s.0.-1 s.1.0 s.1.-1 s.2.0 s.2.-1} Columns{FORM} Concat{1 1 0.0 1} In{300} Out{64} w2v{FORM,data/W2V/fasttextcleanfiltered.w2v}
+  Context : Targets{b.-3 b.-2 b.-1 b.0 b.1 b.2 s.0 s.1 s.2 b.0.0 s.0.0 s.0.-1 s.1.0 s.1.-1 s.2.0 s.2.-1} Columns{EOS ID UPOS FEATS DEPREL} LSTM{1 1 0 1} In{128} Out{64} w2v{}
   Focused : Column{prefix3:FORM} NbElem{3} Buffer{0} Stack{} LSTM{1 1 0 1} In{64} Out{64} w2v{}
   Focused : Column{suffix3:FORM} NbElem{3} Buffer{0} Stack{} LSTM{1 1 0 1} In{64} Out{64} w2v{}
 	RawInput : Left{5} Right{10} LSTM{1 1 0.0 1} In{32} Out{32}
-  History : NbElem{10} LSTM{1 1 0 1} In{32} Out{32}
-	SplitTrans : LSTM{1 1 0.0 1} In{64} Out{64}
-	InputDropout : 0.3
-	MLP : {1600 0.3 1600 0.3}
+  History : NbElem{10} LSTM{1 1 0 1} In{128} Out{64}
+  HistoryMine : NbElem{4} LSTM{1 1 0 1} In{128} Out{64}
+  StateName : Out{64}
+  Distance : FromBuffer{} FromStack{0 1 2} ToBuffer{0} ToStack{} Threshold{15} LSTM{1 1 0.0 1} In{128} Out{64}
+	SplitTrans : LSTM{1 1 0.0 1} In{128} Out{64}
+	InputDropout : 0.5
+	MLP : {3200 0.4 1600 0.4}
 	End
   Optimizer : Adagrad {0.01 0.000001 0 0.0000000001}
   Type : classification
@@ -31,8 +34,7 @@ Strategy
 	Block : End{cannotMove}
 	morpho morpho * 1
 	Block : End{cannotMove}
-	lemmatizer_rules lemmatizer_case * 0
-  lemmatizer_case lemmatizer_rules * 1
+  lemmatizer_rules lemmatizer_rules * 1
 	Block : End{cannotMove}
 	parser segmenter eager_SHIFT 0
 	parser segmenter eager_RIGHT_rel 0
-- 
GitLab