# Flow definition: "tokenizer" — reads lines from stdin, splits them into
# whitespace-separated tokens, and prints the final token count.
# NOTE(review): field semantics below are inferred from the lib:// source
# paths; confirm against the flow library's definition schema.
flow = "tokenizer"
# Process: read a line of text from standard input.
[[process]]
source = "lib://flowruntime/stdio/readline"
# Process: split a string on a separator. The separator input is pinned to a
# single space and re-supplied on every invocation (`always` initializer).
[[process]]
source = "lib://flowstdlib/data/split"
input.separator = { always = " " }
# Feed each line read from stdin into the splitter's string input.
[[connection]]
from = "readline"
to = "split/string"
# Feedback loop: partially-split remainders go back into the splitter's own
# input — presumably so splitting repeats until no partials remain; confirm
# against the split function's output contract.
[[connection]]
from = "split/partial"
to = "split/string"
# Process: adder used as an accumulator of outstanding split work.
# Its second addend is seeded with 1 exactly once (`once` initializer);
# subsequent i2 values arrive via the feedback connection from its own sum.
[[process]]
alias = "work-counter"
source = "lib://flowstdlib/math/add"
input.i2 = { once = 1 }
# Process: adder used as a running total of tokens produced, seeded with 0.
[[process]]
alias = "token-counter"
source = "lib://flowstdlib/math/add"
input.i2 = { once = 0 }
# Each split emits a work delta; add it into the pending-work total.
[[connection]]
from = "split/delta"
to = "work-counter/i1"
# Feed the accumulated sum back into the adder's i2 so it behaves as a
# running counter.
# NOTE(review): the name "pending-work" is also used on the
# work-counter -> compare connection later in this file; confirm duplicate
# connection names are intended/allowed.
[[connection]]
name = "pending-work"
from = "work-counter/sum"
to = "work-counter/i2"
# Add each split's token count into the token total.
[[connection]]
name = "tokens"
from = "split/token-count"
to = "token-counter/i1"
# Same feedback pattern for the token counter: sum loops back into i2.
[[connection]]
from = "token-counter/sum"
to = "token-counter/i2"
# Process: control gate. Receives values on `data` and a `control` input;
# NOTE(review): assumed to pass `data` through only when `control` permits —
# verify against the tap function's documentation.
[[process]]
source = "lib://flowstdlib/control/tap"
# Stream the current token total into the tap's data input.
[[connection]]
name = "token-count"
from = "token-counter/sum"
to = "tap/data"
# Process: comparison with the left operand pinned to 0 on every invocation.
[[process]]
source = "lib://flowstdlib/math/compare"
input.left = { always = 0 }
# Route the pending-work total into the comparison's right operand;
# equality with 0 indicates all outstanding split work has drained.
[[connection]]
name = "pending-work"
from = "work-counter/sum"
to = "compare/right"
# When pending work reaches 0, drive the tap's control input so the final
# token count is released downstream.
[[connection]]
name = "done"
from = "compare/equal"
to = "tap/control"
# Process: write values to standard output.
[[process]]
source = "lib://flowruntime/stdio/stdout"
# Print whatever the tap releases — the final token count.
# NOTE(review): `from = "tap"` carries no output route, unlike every other
# connection in this file; confirm this addresses the tap's default output.
[[connection]]
name = "last-token-count"
from = "tap"
to = "stdout"