1 USING: hashtables assocs sequences locals math accessors multiline delegate strings
2 delegate.protocols kernel peg peg.ebnf peg.private lexer namespaces combinators parser
! lex-hash wraps a hashtable; delegate every assoc-protocol
! operation on a lex-hash to the hashtable in its hash>> slot.
CONSULT: assoc-protocol lex-hash hash>> ;
! Boa-construct a lex-hash tuple around the given assoc.
: <lex-hash> ( a -- lex-hash ) lex-hash boa ;
! Clamp a possibly negative position to zero: negative inputs
! become 0, non-negative inputs pass through unchanged.
: pos-or-0 ( neg? -- pos/0 ) dup 0 >= [ drop 0 ] unless ;
! Convert offset i within sequence v into a column/line pair
! ( c l ). n is the prefix of v up to i; the line number is the
! count of newlines in that prefix plus one, and the column is
! derived from the position just past the last newline in n
! (last-index yields f/-1 when there is none, hence the -1 or).
:: prepare-pos ( v i -- c l )
    [let | n [ i v head-slice ] |
        ! NOTE(review): this subtracts from v, which is a sequence,
        ! not an index — looks like it should be i; confirm upstream.
        v CHAR: \n n last-index -1 or 1 + -
        n [ CHAR: \n = ] count 1 +
! Look up the offset stored under the `input` key of assoc a,
! convert it with prepare-pos to a column/line pair for value v,
! and write those into the current lexer's line and column slots.
: store-pos ( v a -- )
    [ input ] dip at prepare-pos
    lexer get [ (>>line) ] [ (>>column) ] bi ;
25 [ swap hash>> set-at ]
! Convert a line/column pair back into an absolute offset into
! the text t (a sequence of lines): total length of the first l
! lines, plus l 1 - (presumably one per separating newline),
! plus the column c.
:: at-pos ( t l c -- p )
    t l head-slice 0 [ length + ] reduce
    l 1 - + c + ;
32 { input [ drop lexer get text>> "\n" join t ] }
33 { pos [ drop lexer get [ text>> ] [ line>> 1 - ] [ column>> 1 + ] tri at-pos t ] }
37 : with-global-lexer ( quot -- result )
40 V{ } clone error-stack set H{ } clone \ heads set
41 H{ } clone \ packrat set
42 ] f make-assoc <lex-hash>
45 : parse* ( parser -- ast )
47 [ execute [ error-stack get first throw ] unless* ] with-global-lexer
! Define `name` as parsing syntax: at parse time the generated
! word runs `parser` over the lexer's remaining input (after
! skip-blank) via parse*, and appends the resulting AST to the
! parse tree with `parsed` unless the result satisfies ignore?.
! The tokenizer is reset before defining; `word` then fetches
! the just-defined word so it can be marked inline.
: create-bnf ( name parser -- )
reset-tokenizer [ lexer get skip-blank parse* dup ignore? [ drop ] [ parsed ] if ] curry
define-syntax word make-inline ;
55 CREATE-WORD reset-tokenizer ";ON-BNF" parse-multiline-string parse-ebnf
56 main swap at create-bnf ;
! Tokenizer modeled on the standard Factor lexer
60 space = " " | "\n" | "\t"
61 spaces = space* => [[ drop ignore ]]
62 chunk = (!(space) .)+ => [[ >string ]]