1 USING: hashtables assocs sequences locals math accessors multiline delegate strings
2 delegate.protocols kernel peg peg.ebnf lexer namespaces combinators parser words ;
! Delegate the assoc protocol for lex-hash to the wrapped assoc stored
! in its hash>> slot; at* and set-at are overridden by the M: methods
! defined later in this file.
CONSULT: assoc-protocol lex-hash hash>> ;
! Wrap an assoc in a lex-hash tuple (boa = "by order of arguments"
! tuple constructor).
: <lex-hash> ( a -- lex-hash ) lex-hash boa ;
! Clamp a possibly-negative index to zero: negative inputs become 0,
! non-negative inputs pass through unchanged. `0 max` is the idiomatic
! one-word replacement for the explicit `dup 0 < [ drop 0 ] when` test.
: pos-or-0 ( neg? -- pos/0 ) 0 max ;
! Update the global lexer's line/column slots to reflect parse
! position v. a is an assoc whose `input` key holds the full input
! string; n is the slice of that input consumed up to v.
! NOTE(review): `"\n"` here is a one-character STRING being compared
! against individual characters by last-index and the filter quotation;
! those comparisons look like they can never match (CHAR: \n may have
! been intended), which would make column = v and line = 1 always —
! confirm against the peg.ebnf callers.
:: store-pos ( v a -- )
[let | n [ input a at v head-slice ] |
v "\n" n last-index 0 or - lexer get (>>column)
n [ "\n" = ] filter length 1 + lexer get (>>line) ] ;
16 M: lex-hash set-at swap {
18 [ swap hash>> set-at ] } case ;
! Flatten a (line l, column c) position into a single offset p into the
! text t (a sequence of lines): the total length of the first l-1 lines,
! clamped to non-negative, plus the column.
:: at-pos ( t l c -- p ) t l 1 - head-slice 0 [ length + ] reduce pos-or-0 c + ;
! Assoc lookup on lex-hash with two virtual keys:
!  - `input`: the whole lexer text, lines joined with "\n"
!  - `pos`:   the current lexer position as a flat offset (via at-pos,
!             from the lexer's text/line/column slots)
! Any other key falls through to the wrapped hash. The trailing t is
! the found? flag required by the at* protocol.
M: lex-hash at* swap {
{ input [ drop lexer get text>> "\n" join t ] }
{ pos [ drop lexer get [ text>> ] [ line>> ] [ column>> ] tri at-pos t ] }
[ swap hash>> at* ] } case ;
27 : with-global-lexer ( quot -- result )
29 V{ } clone error-stack set H{ } clone \ heads set
30 H{ } clone \ packrat set ] f make-assoc <lex-hash>
34 ! ON-BNF: word expr= [1-9] ;ON-BNF
35 ! << name parser create-bnf >>
37 : parse* ( parser -- ast ) compile
38 [ execute [ error-stack get first throw ] unless* ] with-global-lexer
! Define the word `name` as a parsing word whose body runs `parser`
! against the remaining input (after skipping blanks) and adds the
! resulting AST to the parse tree with `parsed`.
! NOTE(review): when the AST equals V{ } the `parsed` call is skipped
! but the AST is still left on the data stack — confirm that stack
! effect is intentional.
: create-bnf ( name parser -- ) [ lexer get skip-blank parse* dup V{ } = [ parsed ] unless ] curry
define POSTPONE: parsing ;
! Usage: ON-BNF: name <ebnf grammar> ;ON-BNF
! Creates the word `name`, reads the raw text up to ";ON-BNF", parses
! it as an EBNF grammar, looks up the grammar's `main` rule in the
! resulting assoc, and hands name/rule to create-bnf to define `name`
! as a parsing word. Declared `parsing` so it runs at parse time.
: ON-BNF: CREATE-WORD reset-tokenizer ";ON-BNF" parse-multiline-string parse-ebnf
main swap at reset-tokenizer create-bnf ; parsing
! Tokenizer similar to the standard Factor lexer
49 space = " " | "\n" | "\t"
50 spaces = space* => [[ drop ignore ]]
51 chunk = (!(space) .)+ => [[ >string ]]