# syntaxdot 0.2.0
#
# Neural sequence labeler
[input]
tokenizer = { bert = { vocab = "bert-base-german-cased-vocab.txt" } }

[labeler]
labels = "sticker.labels"
encoders = [
  { name = "dep", encoder = { dependency = { encoder = { relativepos = "xpos" }, root_relation = "root" } } },
  { name = "lemma", encoder = { lemma = "form" } },
  { name = "pos", encoder = { sequence = "xpos" } },
]

[model]
parameters = "epoch-99"
position_embeddings = "model"
pretrain_config = "bert_config.json"
pretrain_type = "bert"