1 ! Copyright (C) 2005, 2007 Slava Pestov.
2 ! See http://factorcode.org/license.txt for BSD license.
3 USING: arrays definitions generic assocs kernel math
4 namespaces prettyprint sequences strings vectors words
5 quotations inspector io.styles io combinators sorting
6 splitting math.parser effects continuations debugger
7 io.files io.streams.string io.streams.lines vocabs
8 source-files classes hashtables ;
! A lexer walks over a sequence of lines. The text slot holds the
! lines; line is the current line number (1-based, see line-text's
! use of 1-) and column is the offset within the current line.
TUPLE: lexer text line column ;

! New lexer positioned at line 1, column 0 of the given text.
: <lexer> ( text -- lexer ) 1 0 lexer construct-boa ;

! The current line's string, or f if past the end (?nth).
: line-text ( lexer -- str )
    dup lexer-line 1- swap lexer-text ?nth ;
21 file get lexer get lexer-line 2dup and
22 [ >r source-file-path r> 2array ] [ 2drop f ] if ;
! Definitions recorded by the previous parse of the current file.
SYMBOL: old-definitions
! Definitions recorded so far by the current parse.
SYMBOL: new-definitions

! Thrown when the same definition appears twice in one parsing unit.
TUPLE: redefine-error def ;
! Human-readable report for a duplicate definition.
M: redefine-error error.
    "Re-definition of " write
    redefine-error-def . ;

! Throw a restartable redefine-error; the single "Continue" restart
! lets parsing proceed past the duplicate.
: redefine-error ( definition -- )
    \ redefine-error construct-boa
    { { "Continue" t } } throw-restarts drop ;
! A definition is a re-definition if this parse already recorded it
! in new-definitions. Classes are exempt from the check.
: redefinition? ( definition -- ? )
    dup class? [ drop f ] [ new-definitions get key? ] if ;
40 : (save-location) ( definition loc -- )
41 over redefinition? [ over redefine-error ] when
43 dup new-definitions get dup [ set-at ] [ 3drop ] if ;
! Record the current source location for a definition, signalling a
! restartable error first if it is a re-definition.
: save-location ( definition -- )
    location (save-location) ;
! Parser notes are on by default.
t parser-notes set-global

! Notes are shown unless globally disabled or "quiet" is set.
: parser-notes? ( -- ? )
    parser-notes get "quiet" get not and ;
57 source-file-path <pathname> pprint
66 lexer-line number>string print
70 "Note: " write dup print
! Advance the lexer to the start of the next line: bump the line
! number and reset the column to zero.
: next-line ( lexer -- )
    dup lexer-line 1+ over set-lexer-line
    0 swap set-lexer-column ;
77 : skip ( i seq quot -- n )
79 [ r> drop ] [ r> length ] if* ; inline
81 : change-column ( lexer quot -- )
83 [ dup lexer-column swap line-text rot call ] keep
84 set-lexer-column ; inline
! Move the lexer's column past leading whitespace; generic so lexer
! subclasses can override the behavior.
GENERIC: skip-blank ( lexer -- )

! Skip while characters are blank (stop at the first non-blank).
M: lexer skip-blank ( lexer -- )
    [ [ blank? not ] skip ] change-column ;

! Move the lexer's column past the current word; also generic.
GENERIC: skip-word ( lexer -- )
93 M: lexer skip-word ( lexer -- )
96 [ drop 1+ ] [ [ blank? ] skip ] if
! More input remains while the line number has not run off the end
! of the text.
: still-parsing? ( lexer -- ? )
    [ lexer-line ] keep lexer-text length <= ;
! More input remains on this line while the column has not reached
! the end of the line's text.
: still-parsing-line? ( lexer -- ? )
    [ lexer-column ] keep line-text length < ;
105 : (parse-token) ( lexer -- str )
106 [ lexer-column ] keep
108 [ lexer-column ] keep
111 : parse-token ( lexer -- str/f )
114 dup still-parsing-line?
115 [ (parse-token) ] [ dup next-line parse-token ] if
118 : scan ( -- str/f ) lexer get parse-token ;
! Thrown when a string literal contains an unknown escape code.
: bad-escape ( -- * ) \ bad-escape construct-empty throw ;

M: bad-escape summary drop "Bad escape code" ;
126 : escape ( escape -- ch )
133 { CHAR: \s CHAR: \s }
135 { CHAR: \\ CHAR: \\ }
136 { CHAR: \" CHAR: \" }
137 } at [ bad-escape ] unless* ;
139 : next-escape ( m str -- n ch )
141 [ >r 1+ dup 4 + tuck r> subseq hex> ]
142 [ over 1+ -rot nth escape ] if ;
144 : next-char ( m str -- n ch )
146 [ >r 1+ r> next-escape ] [ over 1+ -rot nth ] if ;
148 : (parse-string) ( m str -- n )
150 [ drop 1+ ] [ [ next-char , ] keep (parse-string) ] if ;
152 : parse-string ( -- str )
154 [ (parse-string) ] "" make swap
157 TUPLE: parse-error file line col text ;
159 : <parse-error> ( msg -- error )
162 lexer get lexer-column
164 parse-error construct-boa
165 [ set-delegate ] keep ;
167 : parse-dump ( error -- )
168 dup parse-error-file file.
169 dup parse-error-line number>string print
170 dup parse-error-text dup string? [ print ] [ drop ] if
171 parse-error-col 0 or CHAR: \s <string> write
! Print the source-location dump, then the wrapped (delegate) error.
M: parse-error error.
    dup parse-dump delegate error. ;
! Append "(vocabulary word-name)" for a word to the string being
! built with make.
: word/vocab% ( word -- )
    "(" % dup word-vocabulary % " " % word-name % ")" % ;
183 : shadow-warning ( new old -- )
187 [ word/vocab% " shadowed by " % word/vocab% ] "" make
191 : shadow-warnings ( vocab vocabs -- )
193 swapd assoc-stack dup
194 [ shadow-warning ] [ 2drop ] if
! Push a vocabulary's word table onto the use list, first warning
! about any words the new vocabulary shadows.
: (use+) ( vocab -- )
    vocab-words use get 2dup shadow-warnings push ;
203 : add-use ( seq -- ) [ use+ ] each ;
206 [ vocab-words ] map [ ] subset >vector use set ;
208 : check-vocab-string ( name -- name )
210 [ "Vocabulary name must be a string" throw ] unless ;
213 check-vocab-string dup in set create-vocab (use+) ;
! Create a word in the current "in" vocabulary, reset it to its
! default state, and record its definition location.
: create-in ( string -- word )
    in get create dup set-word dup save-location ;
! Thrown when the parser sees a token other than the one it wanted.
TUPLE: unexpected want got ;

: unexpected ( want got -- * )
    \ unexpected construct-boa throw ;
223 PREDICATE: unexpected unexpected-eof
226 : unexpected-eof ( word -- * ) f unexpected ;
228 : (parse-tokens) ( accum end -- accum )
232 [ pick push (parse-tokens) ] [ unexpected-eof ] if*
! Collect raw tokens up to (not including) the end token.
: parse-tokens ( end -- seq )
    100 <vector> swap (parse-tokens) >array ;
238 : CREATE ( -- word ) scan create-in ;
! Like CREATE, but also records a definition location for the
! class's predicate word.
: CREATE-CLASS ( -- word )
    scan create-in dup predicate-word save-location ;
243 : word-restarts ( possibilities -- restarts )
245 [ "Use the word " swap summary append ] keep
248 TUPLE: no-word name ;
251 drop "Word not found in current vocabulary search path" ;
! Word lookup failed: throw a restartable error offering every word
! with that name; the chosen word's vocabulary is added to the
! search path and the word is returned.
: no-word ( name -- newword )
    dup \ no-word construct-boa
    swap words-named word-restarts throw-restarts
    dup word-vocabulary (use+) ;
! A word is a forward reference if the previous parse of this file
! defined it but the current parse has not yet redefined it.
: forward-reference? ( word -- ? )
    [ old-definitions get key? ] keep
    new-definitions get key? not and ;
! Thrown when a forward-referenced word is used during parsing.
TUPLE: forward-error word ;

M: forward-error error.
    "Forward reference to " write forward-error-word . ;

: forward-error ( word -- )
    \ forward-error construct-boa throw ;
270 : check-forward ( str word -- word )
271 dup forward-reference? [
274 [ at ] curry* map [ ] subset
275 [ forward-reference? not ] find nip
276 [ ] [ forward-error ] ?if
! Look a token up in the vocabulary search path; check forward
! references on a hit, offer word-not-found restarts on a miss.
: search ( str -- word )
    dup use get assoc-stack [ check-forward ] [ no-word ] if* ;
! Scan the next token: a number literal parses to a number,
! anything else is looked up as a word; f at end of input.
: scan-word ( -- word/number/f )
    scan dup [ dup string>number [ ] [ search ] ?if ] when ;
287 : parse-step ( accum end -- accum ? )
289 { [ 2dup eq? ] [ 2drop f ] }
290 { [ dup not ] [ drop unexpected-eof t ] }
291 { [ dup delimiter? ] [ unexpected t ] }
292 { [ dup parsing? ] [ nip execute t ] }
293 { [ t ] [ pick push drop t ] }
! Accumulate parsed objects until parse-step reports the end.
: (parse-until) ( accum end -- accum )
    dup >r parse-step [ r> (parse-until) ] [ r> drop ] if ;

! Parse objects into a fresh vector until the end word is reached.
: parse-until ( end -- vec )
    100 <vector> swap (parse-until) ;
302 : parsed ( accum obj -- accum ) over push ;
304 : with-parser ( lexer quot -- newquot )
306 [ call >quotation ] [ <parse-error> rethrow ] recover ;
! Parse a lexer's whole input (until end of input) to a quotation,
! wrapping any thrown error with source-location information.
: (parse-lines) ( lexer -- quot )
    [ f parse-until ] with-parser ;

! Hook allowing tools to substitute their own lexer constructor.
SYMBOL: lexer-factory

! Default factory builds a plain lexer.
[ <lexer> ] lexer-factory set-global

! Parse a sequence of source lines into a quotation.
: parse-lines ( lines -- quot )
    lexer-factory get call (parse-lines) ;
318 ! Parsing word utilities
319 : parse-effect ( -- effect )
320 ")" parse-tokens { "--" } split1 dup [
323 "Stack effect declaration must contain --" throw
328 : bad-number ( -- * ) \ bad-number construct-boa throw ;
! Scan a token, parse it as an integer in the given base (base>),
! and accumulate it; throws bad-number on failure.
: parse-base ( parsed base -- parsed )
    scan swap base> [ bad-number ] unless* parsed ;
! Parse up to the end token, transform the resulting sequence with
! quot, and accumulate the transformed value.
: parse-literal ( accum end quot -- accum )
    >r parse-until r> call parsed ; inline

! Parse a definition body up to the terminating ; word.
: parse-definition ( -- quot )
    \ ; parse-until >quotation ;
! Human-readable rendering of what the parser expected or found.
GENERIC: expected>string ( obj -- str )

M: f expected>string drop "end of input" ;
M: word expected>string word-name ;
M: string expected>string ;
347 dup unexpected-want expected>string write
349 unexpected-got expected>string print ;
M: bad-number summary
    drop "Bad number literal" ;

! Optional syntax vocabulary used while bootstrapping; when set it
! is added to the search path (presumably only during bootstrap --
! confirm against the bootstrap code).
SYMBOL: bootstrap-syntax
358 { "syntax" "scratchpad" } set-use
359 bootstrap-syntax get [ use get push ] when* ;
! Parse lines in a new dynamic scope with the default file
! vocabulary search path.
: parse-fresh ( lines -- quot )
    [ file-vocabs parse-lines ] with-scope ;
366 : do-parse-hook ( -- ) parse-hook get [ call ] when* ;
368 : parsing-file ( file -- )
372 "Loading " write <pathname> . flush
! Run the quotation with the parse hook bound to f, then fire the
! restored outer hook once afterwards.
: no-parse-hook ( quot -- )
    >r f parse-hook r> with-variable do-parse-hook ; inline
378 : start-parsing ( stream name -- )
379 H{ } clone new-definitions set
383 source-file-definitions clone old-definitions set
385 contents \ contents set ;
387 : smudged-usage-warning ( usages removed -- )
389 "Warning: the following definitions were removed from sources," print
390 "but are still referenced from other definitions:" print
394 "The following definitions need to be updated:" print
399 : outside-usages ( seq -- usages )
401 over usage [ pathname? not ] subset seq-diff
402 ] curry { } map>assoc ;
404 : filter-moved ( assoc -- newassoc )
406 drop where dup [ first ] when
407 file get source-file-path =
410 : smudged-usage ( -- usages referenced removed )
411 new-definitions get old-definitions get diff filter-moved
414 [ empty? swap pathname? or not ] assoc-subset
415 dup values concat prune swap keys
! Forget definitions that were removed from the source file,
! printing a warning if any are still referenced elsewhere.
: forget-smudged ( -- )
    smudged-usage [ forget ] each
    over empty? [ 2dup smudged-usage-warning ] unless 2drop ;
! Store this parse's definitions into the source file's metadata.
: record-definitions ( file -- )
    new-definitions get swap set-source-file-definitions ;
425 : finish-parsing ( quot -- )
428 [ record-modified ] keep
429 [ \ contents get record-checksum ] keep
436 : undo-parsing ( -- )
438 dup source-file-definitions new-definitions get union
439 swap set-source-file-definitions
442 : parse-stream ( stream name -- quot )
446 \ contents get string-lines parse-fresh
448 ] [ ] [ undo-parsing ] cleanup
! A single restart offering to load the file again.
: parse-file-restarts ( file -- restarts )
    "Load " swap " again" 3append t 2array 1array ;
454 : parse-file ( file -- quot )
456 [ parsing-file ] keep
457 [ ?resource-path <file-reader> ] keep
460 over parse-file-restarts rethrow-restarts
! Parse and run a source file, asserting the stack stays balanced.
: run-file ( file -- )
    [ [ parse-file call ] keep ] assert-depth drop ;

! Re-run the file a definition came from, if its location is known.
: reload ( defspec -- )
    where first [ run-file ] when* ;

! Run the file only if it exists on disk.
: ?run-file ( path -- )
    dup ?resource-path exists? [ run-file ] [ drop ] if ;
473 : bootstrap-file ( path -- )
475 parse-file [ call ] curry %
! Bootstrap-process the file only if it exists on disk.
: ?bootstrap-file ( path -- )
    dup ?resource-path exists? [ bootstrap-file ] [ drop ] if ;

! Parse a string of source code into a quotation.
: parse ( str -- quot ) string-lines parse-lines ;

! Parse and immediately run a string of source code.
: eval ( str -- ) parse call ;
487 : eval>string ( str -- output )
490 [ [ eval ] keep ] try drop