1 USING: hashtables assocs sequences locals math accessors multiline delegate strings
2 delegate.protocols kernel peg peg.ebnf lexer namespaces combinators parser words ;
! Route all assoc-protocol generics sent to a lex-hash to the
! hashtable stored in its hash>> slot, so a lex-hash can stand in
! wherever an assoc is expected (at*/set-at below then override
! selected operations).
6 CONSULT: assoc-protocol lex-hash hash>> ;
! Constructor: wrap an assoc in a lex-hash tuple (boa fills the
! single hash slot by order of arguments).
7 : <lex-hash> ( a -- lex-hash ) lex-hash boa ;
! Clamp a possibly-negative position to zero.
! Idiom fix: "0 max" replaces the hand-rolled
! "dup 0 < [ drop 0 ] when" — same behavior, standard word.
: pos-or-0 ( neg? -- pos/0 ) 0 max ;
! Turn an offset within a string into a column c and 1-based line l.
! NOTE(review): despite the declared effect ( v i -- c l ), the only
! visible caller (store-pos) pushes the offset first and the text
! second, so here v is the numeric offset and i is the text —
! confirm before renaming. n is the text sliced up to the offset;
! the column is the offset minus the position just after the last
! newline in n (-1 or handles "no newline"); the line is the number
! of newlines in n plus one.
11 :: prepare-pos ( v i -- c l )
12 [let | n [ i v head-slice ] |
13 v CHAR: \n n last-index -1 or 1+ -
14 n [ CHAR: \n = ] count 1+ ] ;
! Record the current parse position in the global lexer: fetch the
! original input text stored under the input key of assoc a, derive
! ( column line ) from offset v with prepare-pos, then write line
! first ([ (>>line) ] keep retains the lexer object) and column
! second via the tuple setters.
16 : store-pos ( v a -- ) input swap at prepare-pos
17 lexer get [ (>>line) ] keep (>>column) ;
! set-at on a lex-hash: the case fall-through delegates the store to
! the wrapped hashtable (hash>>).
! NOTE(review): the case may carry additional key-specific branches
! (cf. the virtual input/pos keys handled by at* below) — verify
! against the full definition.
19 M: lex-hash set-at swap {
21 [ swap hash>> set-at ] } case ;
! Absolute offset p for a (line, column) position: t is the sequence
! of text lines, l the line index, c the column. Adds the lengths of
! the l preceding lines, then l-1 (presumably the "\n" separators —
! at* joins the text with "\n"), then the column.
:: at-pos ( t l c -- p )
    t l head-slice 0 [ length + ] reduce
    l 1 - + c + ;
! at* on a lex-hash, with two virtual keys answered from the global
! lexer rather than the wrapped hashtable:
!   input -- the lexer's full text, re-joined with "\n"
!   pos   -- the absolute offset computed by at-pos from the lexer's
!            current line (1-based, hence 1-) and column (hence 1+)
! Any other key falls through to the wrapped hashtable. Both virtual
! branches push t as the found? flag required by at*.
25 M: lex-hash at* swap {
26 { input [ drop lexer get text>> "\n" join t ] }
27 { pos [ drop lexer get [ text>> ] [ line>> 1- ] [ column>> 1+ ] tri at-pos t ] }
28 [ swap hash>> at* ] } case ;
30 : with-global-lexer ( quot -- result )
32 V{ } clone error-stack set H{ } clone \ heads set
33 H{ } clone \ packrat set ] f make-assoc <lex-hash>
36 : parse* ( parser -- ast ) compile
37 [ execute [ error-stack get first throw ] unless* ] with-global-lexer
! Install a peg parser as a parsing word: the word's definition
! skips leading blanks in the current lexer input, runs the parser
! via parse*, and pushes the resulting AST with parsed.
! NOTE(review): uses the old definition API (define / word /
! make-parsing) — confirm stack order of name and parser against
! that API before changing anything here.
40 : create-bnf ( name parser -- ) reset-tokenizer [ lexer get skip-blank parse* parsed ] curry
41 define word make-parsing ;
! Parsing word: ON-BNF: name ... ;ON-BNF
! Creates the word (CREATE-WORD), reads the raw grammar text up to
! the ";ON-BNF" delimiter, compiles it with parse-ebnf, pulls the
! parser registered under the main key out of the resulting assoc,
! and installs it as a parsing word via create-bnf.
43 : ON-BNF: CREATE-WORD reset-tokenizer ";ON-BNF" parse-multiline-string parse-ebnf
44 main swap at create-bnf ; parsing
46 ! Tokenizer similar to the standard Factor lexer
48 space = " " | "\n" | "\t"
49 spaces = space* => [[ drop ignore ]]
50 chunk = (!(space) .)+ => [[ >string ]]