Commit ade63d52 authored by Franck Dary

Added big machines

parent 37c07d3b
Name : Tokenizer, Tagger, Morpho, Lemmatizer, Parser and Segmenter Machine
Classifier : tokeparser
{
Transitions : {tokenizer,data/tokenizer.ts tagger,data/tagger.ts morpho,data/morpho_whole.ts lemmatizer_rules,data/lemmatizer_rules.ts lemmatizer_case,data/lemmatizer_case.ts parser,data/parser_eager_rel_strict.ts segmenter,data/segmenter.ts}
LossMultiplier : {}
Network type : Modular
Contextual : Window{-10 0} Columns{FORM} LSTM{1 1 0 1} In{64} Out{256} w2v{data/pretrained.w2v} Targets{b.-3 b.-2 b.-1 b.0 s.0 s.1 s.2 b.0.0 s.0.0 s.0.-1 s.1.0 s.1.-1 s.2.0 s.2.-1}
Context : Targets{b.-3 b.-2 b.-1 b.0 s.0 s.1 s.2 b.0.0 s.0.0 s.0.-1 s.1.0 s.1.-1 s.2.0 s.2.-1} Columns{EOS ID UPOS FEATS DEPREL} LSTM{1 1 0 1} In{64} Out{128} w2v{}
Focused : Column{prefix3:FORM} NbElem{3} Buffer{0} Stack{} LSTM{1 1 0 1} In{64} Out{64}
Focused : Column{suffix3:FORM} NbElem{3} Buffer{0} Stack{} LSTM{1 1 0 1} In{64} Out{64}
RawInput : Left{5} Right{15} LSTM{1 1 0.0 1} In{32} Out{32}
History : NbElem{10} LSTM{1 1 0 1} In{32} Out{32}
SplitTrans : LSTM{1 1 0.0 1} In{64} Out{64}
InputDropout : 0.3
MLP : {2500 0.3 2500 0.3}
End
Optimizer : Adagrad {0.01 0.000001 0 0.0000000001}
}
Splitwords : data/splitwords.ts
Predictions : ID FORM UPOS FEATS LEMMA HEAD DEPREL EOS
Strategy
{
Block : End{cannotMove}
tokenizer tokenizer ENDWORD 1
tokenizer tokenizer SPLIT 1
tokenizer tokenizer * 0
Block : End{cannotMove}
tagger tagger * 1
Block : End{cannotMove}
morpho morpho * 1
Block : End{cannotMove}
lemmatizer_rules lemmatizer_case * 0
lemmatizer_case lemmatizer_rules * 1
Block : End{cannotMove}
parser segmenter eager_SHIFT 0
parser segmenter eager_RIGHT_rel 0
parser parser * 0
segmenter parser * 1
}
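
The Strategy blocks above appear to list one routing rule per line: a source state, a destination state, a transition name ('*' seemingly acting as a wildcard), and how far the word index moves afterwards. As a rough illustration only (the field meanings are an assumption, not documented in this commit), a minimal Python sketch of how such a block could be read into a data structure:

from dataclasses import dataclass

@dataclass
class StrategyRule:
    from_state: str
    to_state: str
    transition: str   # assumed: '*' matches any transition
    movement: int     # assumed: relative movement of the word index

def parse_strategy(lines):
    # One list of rules per "Block :" line; braces and the header are skipped.
    blocks, current = [], None
    for raw in lines:
        line = raw.strip()
        if line.startswith("Block"):
            current = []
            blocks.append(current)
        elif line and not line.startswith(("Strategy", "{", "}")):
            frm, to, trans, mov = line.split()
            current.append(StrategyRule(frm, to, trans, int(mov)))
    return blocks
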
Name : Tokenizer, Tagger, Morpho, Lemmatizer, Parser and Segmenter Machine
Classifier : tokeparser
{
Transitions : {tokenizer,data/tokenizer.ts tagger,data/tagger.ts morpho,data/morpho_whole.ts lemmatizer_rules,data/lemmatizer_rules.ts lemmatizer_case,data/lemmatizer_case.ts parser,data/parser_eager_rel_strict.ts segmenter,data/segmenter.ts}
LossMultiplier : {}
Network type : Modular
Contextual : Window{-10 0} Columns{FORM} LSTM{1 1 0 1} In{64} Out{256} w2v{data/pretrained.w2v} Targets{b.-3 b.-2 b.-1 b.0 s.0 s.1 s.2 b.0.0 s.0.0 s.0.-1 s.1.0 s.1.-1 s.2.0 s.2.-1}
Context : Targets{b.-3 b.-2 b.-1 b.0 s.0 s.1 s.2 b.0.0 s.0.0 s.0.-1 s.1.0 s.1.-1 s.2.0 s.2.-1} Columns{EOS ID UPOS FEATS DEPREL} LSTM{1 1 0 1} In{64} Out{128} w2v{}
Focused : Column{prefix3:FORM} NbElem{3} Buffer{0} Stack{} LSTM{1 1 0 1} In{64} Out{64}
Focused : Column{suffix3:FORM} NbElem{3} Buffer{0} Stack{} LSTM{1 1 0 1} In{64} Out{64}
RawInput : Left{5} Right{15} LSTM{1 1 0.0 1} In{32} Out{32}
History : NbElem{10} LSTM{1 1 0 1} In{32} Out{32}
SplitTrans : LSTM{1 1 0.0 1} In{64} Out{64}
InputDropout : 0.3
MLP : {2500 0.3 2500 0.3}
End
Optimizer : Adagrad {0.01 0.000001 0 0.0000000001}
}
Splitwords : data/splitwords.ts
Predictions : ID FORM UPOS FEATS LEMMA HEAD DEPREL EOS
Strategy
{
Block : End{cannotMove}
tokenizer tagger ENDWORD 0
tokenizer tagger SPLIT 0
tokenizer tokenizer * 0
tagger morpho * 0
morpho lemmatizer_rules * 0
lemmatizer_rules lemmatizer_case * 0
lemmatizer_case parser * 0
parser segmenter eager_SHIFT 0
parser segmenter eager_RIGHT_rel 0
parser parser * 0
segmenter tokenizer * 1
}
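
Unlike the first machine, this second machine seems to use a single incremental block: control appears to pass from the tokenizer through the tagger, morphological analyzer, the two lemmatizers, the parser and the segmenter for each word, with the segmenter handing control back to the tokenizer and advancing the word index. Reusing parse_strategy and StrategyRule from the sketch after the first machine (same assumptions about field meanings), that routing could be traced like this:

def next_state(rules, state, transition):
    # First matching rule wins; '*' acts as a wildcard transition name.
    for rule in rules:
        if rule.from_state == state and rule.transition in (transition, "*"):
            return rule.to_state, rule.movement
    raise KeyError((state, transition))

incremental = parse_strategy("""\
Block : End{cannotMove}
tokenizer tagger ENDWORD 0
tokenizer tagger SPLIT 0
tokenizer tokenizer * 0
tagger morpho * 0
morpho lemmatizer_rules * 0
lemmatizer_rules lemmatizer_case * 0
lemmatizer_case parser * 0
parser segmenter eager_SHIFT 0
parser segmenter eager_RIGHT_rel 0
parser parser * 0
segmenter tokenizer * 1
""".splitlines())[0]

print(next_state(incremental, "tokenizer", "ENDWORD"))  # ('tagger', 0)
print(next_state(incremental, "segmenter", "ANY"))      # ('tokenizer', 1); 'ANY' is a placeholder name, matched by '*'
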
Name : Tokenizer, Tagger, Morpho, Lemmatizer, Parser and Segmenter Machine
Classifier : tokeparser
{
Transitions : {tokenizer,data/tokenizer.ts tagger,data/tagger.ts morpho,data/morpho_whole.ts lemmatizer_rules,data/lemmatizer_rules.ts lemmatizer_case,data/lemmatizer_case.ts parser,data/parser_eager_rel_strict.ts segmenter,data/segmenter.ts}
LossMultiplier : {}
Network type : Modular
Contextual : Window{-10 10} Columns{FORM} LSTM{1 1 0 1} In{64} Out{256} w2v{data/pretrained.w2v} Targets{b.-3 b.-2 b.-1 b.0 b.1 b.2 s.0 s.1 s.2 b.0.0 s.0.0 s.0.-1 s.1.0 s.1.-1 s.2.0 s.2.-1}
Context : Targets{b.-3 b.-2 b.-1 b.0 b.1 b.2 s.0 s.1 s.2 b.0.0 s.0.0 s.0.-1 s.1.0 s.1.-1 s.2.0 s.2.-1} Columns{EOS ID UPOS FEATS DEPREL} LSTM{1 1 0 1} In{64} Out{128} w2v{}
Focused : Column{prefix3:FORM} NbElem{3} Buffer{0} Stack{} LSTM{1 1 0 1} In{64} Out{64}
Focused : Column{suffix3:FORM} NbElem{3} Buffer{0} Stack{} LSTM{1 1 0 1} In{64} Out{64}
RawInput : Left{5} Right{15} LSTM{1 1 0.0 1} In{32} Out{32}
History : NbElem{10} LSTM{1 1 0 1} In{32} Out{32}
SplitTrans : LSTM{1 1 0.0 1} In{64} Out{64}
InputDropout : 0.3
MLP : {2500 0.3 2500 0.3}
End
Optimizer : Adagrad {0.01 0.000001 0 0.0000000001}
}
Splitwords : data/splitwords.ts
Predictions : ID FORM UPOS FEATS LEMMA HEAD DEPREL EOS
Strategy
{
Block : End{cannotMove}
tokenizer tokenizer ENDWORD 1
tokenizer tokenizer SPLIT 1
tokenizer tokenizer * 0
Block : End{cannotMove}
tagger tagger * 1
Block : End{cannotMove}
morpho morpho * 1
Block : End{cannotMove}
lemmatizer_rules lemmatizer_case * 0
lemmatizer_case lemmatizer_rules * 1
Block : End{cannotMove}
parser segmenter eager_SHIFT 0
parser segmenter eager_RIGHT_rel 0
parser parser * 0
segmenter parser * 1
}
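
Each classifier block ends with an MLP and an Optimizer line. A plausible reading of MLP : {2500 0.3 2500 0.3} is two 2500-unit hidden layers, each followed by dropout with p = 0.3, and of Adagrad {0.01 0.000001 0 0.0000000001} as learning rate, learning-rate decay, weight decay and epsilon; the field order is an assumption, not something stated in these files. Under that assumption, a rough PyTorch equivalent of the classifier head would be:

import torch
from torch import nn

# Placeholder sizes, not taken from the config: the real input dimension is
# the concatenation of the module outputs, and the output size is the number
# of transitions of the current state.
input_dim, nb_transitions = 1024, 200

mlp = nn.Sequential(
    nn.Linear(input_dim, 2500), nn.ReLU(), nn.Dropout(0.3),
    nn.Linear(2500, 2500), nn.ReLU(), nn.Dropout(0.3),
    nn.Linear(2500, nb_transitions),
)

optimizer = torch.optim.Adagrad(
    mlp.parameters(), lr=0.01, lr_decay=1e-6, weight_decay=0, eps=1e-10,
)
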