+++ /dev/null
-Sam Anklesaria
+++ /dev/null
-USING: help.syntax help.markup modules.rpc-server modules.using ;
-IN: modules.rpc-server
-HELP: service
-{ $syntax "IN: my-vocab service" }
-{ $description "Allows words defined in the vocabulary to be used as remote procedure calls by " { $link POSTPONE: USING*: } } ;
\ No newline at end of file
+++ /dev/null
-! Copyright (C) 2009 Sam Anklesaria.
-! See http://factorcode.org/license.txt for BSD license.
-USING: accessors assocs combinators continuations effects
-io.encodings.binary io.servers.connection kernel namespaces
-sequences serialize sets threads vocabs vocabs.parser init io ;
-IN: modules.rpc-server
-
-<PRIVATE
-TUPLE: rpc-request args vocabspec wordname ;
-SYMBOL: serving-vocabs serving-vocabs [ V{ } clone ] initialize
-
-: getter ( -- ) deserialize dup serving-vocabs get-global index
- [ vocab-words [ stack-effect ] { } assoc-map-as ]
- [ \ no-vocab boa ] if serialize flush ;
-
-: doer ( -- ) deserialize dup vocabspec>> serving-vocabs get-global index
- [ [ args>> ] [ wordname>> ] [ vocabspec>> vocab-words ] tri at [ execute ] curry with-datastack ]
- [ vocabspec>> \ no-vocab boa ] if serialize flush ;
-
-PRIVATE>
-SYNTAX: service current-vocab name>> serving-vocabs get-global adjoin ;
-
-: start-rpc-server ( -- )
- binary <threaded-server>
- "rpcs" >>name 9012 >>insecure
- [ deserialize {
- { "getter" [ getter ] }
- { "doer" [ doer ] }
- { "loader" [ deserialize vocab serialize flush ] }
- } case ] >>handler
- start-server ;
+++ /dev/null
-Serve factor words as rpcs
\ No newline at end of file
+++ /dev/null
-Sam Anklesaria
+++ /dev/null
-USING: help.syntax help.markup ;
-IN: modules.rpc
-ARTICLE: { "modules" "protocol" } "RPC Protocol"
-{ $list
- "Send vocab as string"
- "Send arglist"
- "Send word as string"
- "Receive result list"
-} ;
\ No newline at end of file
+++ /dev/null
-! Copyright (C) 2009 Sam Anklesaria.
-! See http://factorcode.org/license.txt for BSD license.
-USING: accessors assocs fry generalizations io.encodings.binary
-io.sockets kernel locals namespaces parser sequences serialize
-vocabs vocabs.parser words io ;
-IN: modules.rpc
-
-TUPLE: rpc-request args vocabspec wordname ;
-
-: send-with-check ( message -- reply/* )
- serialize flush deserialize dup no-vocab? [ throw ] when ;
-
-:: define-remote ( str effect addrspec vocabspec -- )
- str create-in effect [ in>> length ] [ out>> length ] bi
- '[ _ narray vocabspec str rpc-request boa addrspec 9012 <inet> binary
- [ "doer" serialize send-with-check ] with-client _ firstn ]
- effect define-declared ;
-
-:: remote-vocab ( addrspec vocabspec -- vocab )
- vocabspec "-remote" append dup vocab [ dup set-current-vocab
- vocabspec addrspec 9012 <inet> binary [ "getter" serialize send-with-check ] with-client
- [ first2 addrspec vocabspec define-remote ] each
- ] unless ;
-
-: remote-load ( addr vocabspec -- voabspec ) [ swap
- 9012 <inet> binary [ "loader" serialize serialize flush deserialize ] with-client ] keep
- [ dictionary get-global set-at ] keep ;
\ No newline at end of file
+++ /dev/null
-remote procedure call client
\ No newline at end of file
+++ /dev/null
-Sam Anklesaria
+++ /dev/null
-Improved module import syntax with network transparency
\ No newline at end of file
+++ /dev/null
-USING: help.syntax help.markup strings modules.using ;
-IN: modules.using
-ARTICLE: { "modules.using" "use" } "Using the modules.using vocab"
-"This vocabulary defines " { $link POSTPONE: USING*: } " as an alternative to " { $link POSTPONE: USING: } " which makes qualified imports easier. "
-"Secondly, it allows loading vocabularies from remote servers, as long as the remote vocabulary can be accessed at compile time. "
-"Finally, the word can treat words in remote vocabularies as remote procedure calls. Any inputs are passed to the imported words as normal, and the result will appear on the stack- the only difference is that the word isn't called locally." ;
-ABOUT: { "modules.using" "use" }
-
-HELP: USING*:
-{ $syntax "USING: rpc-server::module fetch-sever:module { module qualified-name } { module => word ... } { qualified-module } { module EXCEPT word ... } { module word => importname } ;" }
-{ $description "Adds vocabularies to the search path. Vocabularies can be loaded off a server or called as an rpc if preceded by a valid hostname. Bracketed pairs facilitate all types of qualified imports on both remote and local modules." } ;
\ No newline at end of file
+++ /dev/null
-! Copyright (C) 2009 Sam Anklesaria.
-! See http://factorcode.org/license.txt for BSD license.
-USING: kernel modules.rpc peg peg-lexer peg.ebnf sequences
-strings vocabs.parser ;
-IN: modules.using
-
-EBNF: modulize
-tokenpart = (!(':').)+ => [[ >string ]]
-s = ':' => [[ drop ignore ]]
-rpc = tokenpart s s tokenpart => [[ first2 remote-vocab ]]
-remote = tokenpart s tokenpart => [[ first2 remote-load ]]
-module = rpc | remote | tokenpart
-;EBNF
-
-ON-BNF: USING*:
-tokenizer = <foreign factor>
-sym = !(";"|"}"|"=>"|"EXCEPT").
-modspec = sym => [[ modulize ]]
-qualified-with = modspec sym => [[ first2 add-qualified ignore ]]
-qualified = modspec => [[ dup add-qualified ignore ]]
-from = modspec "=>" sym+ => [[ first3 nip add-words-from ignore ]]
-exclude = modspec "EXCEPT" sym+ => [[ first3 nip add-words-excluding ignore ]]
-rename = modspec sym "=>" sym => [[ first4 nip swapd add-renamed-word ignore ]]
-long = "{" ( from | exclude | rename | qualified-with | qualified ) "}" => [[ drop ignore ]]
-short = modspec => [[ use-vocab ignore ]]
-wordSpec = long | short
-using = wordSpec+ ";" => [[ drop ignore ]]
-;ON-BNF
\ No newline at end of file
+++ /dev/null
-Sam Anklesaria
\ No newline at end of file
+++ /dev/null
-USING: peg.ebnf help.syntax help.markup strings ;
-IN: peg-lexer
-
-HELP: ON-BNF:
-{ $syntax "ON-BNF: word ... ;ON-BNF" }
-{ $description "Creates a parsing word using a parser for lexer control, adding the resulting ast to the stack. Parser syntax is as in " { $link POSTPONE: EBNF: } } ;
-
-HELP: create-bnf
-{ $values { "name" string } { "parser" parser } }
-{ $description "Runtime equivalent of " { $link POSTPONE: ON-BNF: } " also useful with manually constructed parsers." } ;
-
-HELP: factor
-{ $values { "input" string } { "ast" "a sequence of tokens" } }
-{ $description "Tokenizer that acts like standard factor lexer, separating tokens by whitespace." } ;
\ No newline at end of file
+++ /dev/null
-USING: tools.test peg-lexer.test-parsers ;
-IN: peg-lexer.tests
-
-{ V{ "1234" "-end" } } [
- test1 1234-end
-] unit-test
-
-{ V{ 1234 53 } } [
- test2 12345
-] unit-test
-
-{ V{ "heavy" "duty" "testing" } } [
- test3 heavy duty testing
-] unit-test
\ No newline at end of file
+++ /dev/null
-USING: hashtables assocs sequences locals math accessors multiline delegate strings
-delegate.protocols kernel peg peg.ebnf peg.private lexer namespaces combinators parser
-words ;
-IN: peg-lexer
-
-TUPLE: lex-hash hash ;
-CONSULT: assoc-protocol lex-hash hash>> ;
-: <lex-hash> ( a -- lex-hash ) lex-hash boa ;
-
-: pos-or-0 ( neg? -- pos/0 ) dup 0 < [ drop 0 ] when ;
-
-:: prepare-pos ( v i -- c l )
- [let | n [ i v head-slice ] |
- v CHAR: \n n last-index -1 or 1 + -
- n [ CHAR: \n = ] count 1 +
- ] ;
-
-: store-pos ( v a -- )
- input swap at prepare-pos
- lexer get [ (>>line) ] keep (>>column) ;
-
-M: lex-hash set-at
- swap {
- { pos [ store-pos ] }
- [ swap hash>> set-at ]
- } case ;
-
-:: at-pos ( t l c -- p ) t l head-slice [ length ] map sum l 1 - + c + ;
-
-M: lex-hash at*
- swap {
- { input [ drop lexer get text>> "\n" join t ] }
- { pos [ drop lexer get [ text>> ] [ line>> 1 - ] [ column>> 1 + ] tri at-pos t ] }
- [ swap hash>> at* ]
- } case ;
-
-: with-global-lexer ( quot -- result )
- [
- f lrstack set
- V{ } clone error-stack set H{ } clone \ heads set
- H{ } clone \ packrat set
- ] f make-assoc <lex-hash>
- swap bind ; inline
-
-: parse* ( parser -- ast )
- compile
- [ execute [ error-stack get first throw ] unless* ] with-global-lexer
- ast>> ; inline
-
-: create-bnf ( name parser -- )
- reset-tokenizer [ lexer get skip-blank parse* dup ignore? [ drop ] [ parsed ] if ] curry
- define-syntax word make-inline ;
-
-SYNTAX: ON-BNF:
- CREATE-WORD reset-tokenizer ";ON-BNF" parse-multiline-string parse-ebnf
- main swap at create-bnf ;
-
-! Tokenizer like standard factor lexer
-EBNF: factor
-space = " " | "\n" | "\t"
-spaces = space* => [[ drop ignore ]]
-chunk = (!(space) .)+ => [[ >string ]]
-expr = spaces chunk
-;EBNF
+++ /dev/null
-Use peg to write parsing words
+++ /dev/null
-extensions
-reflection
+++ /dev/null
-USING: peg-lexer math.parser strings ;
-IN: peg-lexer.test-parsers
-
-ON-BNF: test1
- num = [1-4]* => [[ >string ]]
- expr = num ( "-end" | "-done" )
-;ON-BNF
-
-ON-BNF: test2
- num = [1-4]* => [[ >string string>number ]]
- expr= num [5-9]
-;ON-BNF
-
-ON-BNF: test3
- tokenizer = <foreign factor>
- expr= "heavy" "duty" "testing"
-;ON-BNF
\ No newline at end of file
--- /dev/null
+Sam Anklesaria
--- /dev/null
+USING: help.syntax help.markup modules.rpc-server modules.using ;
+IN: modules.rpc-server
+HELP: service
+{ $syntax "IN: my-vocab service" }
+{ $description "Allows words defined in the vocabulary to be used as remote procedure calls by " { $link POSTPONE: USING*: } } ;
\ No newline at end of file
--- /dev/null
+! Copyright (C) 2009 Sam Anklesaria.
+! See http://factorcode.org/license.txt for BSD license.
+USING: accessors assocs combinators continuations effects
+io.encodings.binary io.servers.connection kernel namespaces
+sequences serialize sets threads vocabs vocabs.parser init io ;
+IN: modules.rpc-server
+
+<PRIVATE
+TUPLE: rpc-request args vocabspec wordname ;
+SYMBOL: serving-vocabs serving-vocabs [ V{ } clone ] initialize
+
+: getter ( -- ) deserialize dup serving-vocabs get-global index
+ [ vocab-words [ stack-effect ] { } assoc-map-as ]
+ [ \ no-vocab boa ] if serialize flush ;
+
+: doer ( -- ) deserialize dup vocabspec>> serving-vocabs get-global index
+ [ [ args>> ] [ wordname>> ] [ vocabspec>> vocab-words ] tri at [ execute ] curry with-datastack ]
+ [ vocabspec>> \ no-vocab boa ] if serialize flush ;
+
+PRIVATE>
+SYNTAX: service current-vocab name>> serving-vocabs get-global adjoin ;
+
+: start-rpc-server ( -- )
+ binary <threaded-server>
+ "rpcs" >>name 9012 >>insecure
+ [ deserialize {
+ { "getter" [ getter ] }
+ { "doer" [ doer ] }
+ { "loader" [ deserialize vocab serialize flush ] }
+ } case ] >>handler
+ start-server ;
--- /dev/null
+Serve Factor words as RPCs
\ No newline at end of file
--- /dev/null
+Sam Anklesaria
--- /dev/null
+USING: help.syntax help.markup ;
+IN: modules.rpc
+ARTICLE: { "modules" "protocol" } "RPC Protocol"
+{ $list
+ "Send vocab as string"
+ "Send arglist"
+ "Send word as string"
+ "Receive result list"
+} ;
\ No newline at end of file
--- /dev/null
+! Copyright (C) 2009 Sam Anklesaria.
+! See http://factorcode.org/license.txt for BSD license.
+USING: accessors assocs fry generalizations io.encodings.binary
+io.sockets kernel locals namespaces parser sequences serialize
+vocabs vocabs.parser words io ;
+IN: modules.rpc
+
+TUPLE: rpc-request args vocabspec wordname ;
+
+: send-with-check ( message -- reply/* )
+ serialize flush deserialize dup no-vocab? [ throw ] when ;
+
+:: define-remote ( str effect addrspec vocabspec -- )
+ str create-in effect [ in>> length ] [ out>> length ] bi
+ '[ _ narray vocabspec str rpc-request boa addrspec 9012 <inet> binary
+ [ "doer" serialize send-with-check ] with-client _ firstn ]
+ effect define-declared ;
+
+:: remote-vocab ( addrspec vocabspec -- vocab )
+ vocabspec "-remote" append dup vocab [ dup set-current-vocab
+ vocabspec addrspec 9012 <inet> binary [ "getter" serialize send-with-check ] with-client
+ [ first2 addrspec vocabspec define-remote ] each
+ ] unless ;
+
+: remote-load ( addr vocabspec -- vocabspec ) [ swap
+ 9012 <inet> binary [ "loader" serialize serialize flush deserialize ] with-client ] keep
+ [ dictionary get-global set-at ] keep ;
\ No newline at end of file
--- /dev/null
+remote procedure call client
\ No newline at end of file
--- /dev/null
+Sam Anklesaria
--- /dev/null
+Improved module import syntax with network transparency
\ No newline at end of file
--- /dev/null
+USING: help.syntax help.markup strings modules.using ;
+IN: modules.using
+ARTICLE: { "modules.using" "use" } "Using the modules.using vocab"
+"This vocabulary defines " { $link POSTPONE: USING*: } " as an alternative to " { $link POSTPONE: USING: } " which makes qualified imports easier. "
+"Secondly, it allows loading vocabularies from remote servers, as long as the remote vocabulary can be accessed at compile time. "
+"Finally, the word can treat words in remote vocabularies as remote procedure calls. Any inputs are passed to the imported words as normal, and the result will appear on the stack — the only difference is that the word isn't called locally." ;
+ABOUT: { "modules.using" "use" }
+
+HELP: USING*:
+{ $syntax "USING: rpc-server::module fetch-server:module { module qualified-name } { module => word ... } { qualified-module } { module EXCEPT word ... } { module word => importname } ;" }
+{ $description "Adds vocabularies to the search path. Vocabularies can be loaded off a server or called as an rpc if preceded by a valid hostname. Bracketed pairs facilitate all types of qualified imports on both remote and local modules." } ;
\ No newline at end of file
--- /dev/null
+! Copyright (C) 2009 Sam Anklesaria.
+! See http://factorcode.org/license.txt for BSD license.
+USING: kernel modules.rpc peg peg-lexer peg.ebnf sequences
+strings vocabs.parser ;
+IN: modules.using
+
+EBNF: modulize
+tokenpart = (!(':').)+ => [[ >string ]]
+s = ':' => [[ drop ignore ]]
+rpc = tokenpart s s tokenpart => [[ first2 remote-vocab ]]
+remote = tokenpart s tokenpart => [[ first2 remote-load ]]
+module = rpc | remote | tokenpart
+;EBNF
+
+ON-BNF: USING*:
+tokenizer = <foreign factor>
+sym = !(";"|"}"|"=>"|"EXCEPT").
+modspec = sym => [[ modulize ]]
+qualified-with = modspec sym => [[ first2 add-qualified ignore ]]
+qualified = modspec => [[ dup add-qualified ignore ]]
+from = modspec "=>" sym+ => [[ first3 nip add-words-from ignore ]]
+exclude = modspec "EXCEPT" sym+ => [[ first3 nip add-words-excluding ignore ]]
+rename = modspec sym "=>" sym => [[ first4 nip swapd add-renamed-word ignore ]]
+long = "{" ( from | exclude | rename | qualified-with | qualified ) "}" => [[ drop ignore ]]
+short = modspec => [[ use-vocab ignore ]]
+wordSpec = long | short
+using = wordSpec+ ";" => [[ drop ignore ]]
+;ON-BNF
\ No newline at end of file
--- /dev/null
+Sam Anklesaria
\ No newline at end of file
--- /dev/null
+USING: peg.ebnf help.syntax help.markup strings ;
+IN: peg-lexer
+
+HELP: ON-BNF:
+{ $syntax "ON-BNF: word ... ;ON-BNF" }
+{ $description "Creates a parsing word using a parser for lexer control, adding the resulting ast to the stack. Parser syntax is as in " { $link POSTPONE: EBNF: } } ;
+
+HELP: create-bnf
+{ $values { "name" string } { "parser" parser } }
+{ $description "Runtime equivalent of " { $link POSTPONE: ON-BNF: } ", also useful with manually constructed parsers." } ;
+
+HELP: factor
+{ $values { "input" string } { "ast" "a sequence of tokens" } }
+{ $description "Tokenizer that acts like standard factor lexer, separating tokens by whitespace." } ;
\ No newline at end of file
--- /dev/null
+USING: tools.test peg-lexer.test-parsers ;
+IN: peg-lexer.tests
+
+{ V{ "1234" "-end" } } [
+ test1 1234-end
+] unit-test
+
+{ V{ 1234 53 } } [
+ test2 12345
+] unit-test
+
+{ V{ "heavy" "duty" "testing" } } [
+ test3 heavy duty testing
+] unit-test
\ No newline at end of file
--- /dev/null
+USING: hashtables assocs sequences locals math accessors multiline delegate strings
+delegate.protocols kernel peg peg.ebnf peg.private lexer namespaces combinators parser
+words ;
+IN: peg-lexer
+
+TUPLE: lex-hash hash ;
+CONSULT: assoc-protocol lex-hash hash>> ;
+: <lex-hash> ( a -- lex-hash ) lex-hash boa ;
+
+: pos-or-0 ( neg? -- pos/0 ) dup 0 < [ drop 0 ] when ;
+
+:: prepare-pos ( v i -- c l )
+ [let | n [ i v head-slice ] |
+ v CHAR: \n n last-index -1 or 1 + -
+ n [ CHAR: \n = ] count 1 +
+ ] ;
+
+: store-pos ( v a -- )
+ input swap at prepare-pos
+ lexer get [ (>>line) ] keep (>>column) ;
+
+M: lex-hash set-at
+ swap {
+ { pos [ store-pos ] }
+ [ swap hash>> set-at ]
+ } case ;
+
+:: at-pos ( t l c -- p ) t l head-slice [ length ] map sum l 1 - + c + ;
+
+M: lex-hash at*
+ swap {
+ { input [ drop lexer get text>> "\n" join t ] }
+ { pos [ drop lexer get [ text>> ] [ line>> 1 - ] [ column>> 1 + ] tri at-pos t ] }
+ [ swap hash>> at* ]
+ } case ;
+
+: with-global-lexer ( quot -- result )
+ [
+ f lrstack set
+ V{ } clone error-stack set H{ } clone \ heads set
+ H{ } clone \ packrat set
+ ] f make-assoc <lex-hash>
+ swap bind ; inline
+
+: parse* ( parser -- ast )
+ compile
+ [ execute [ error-stack get first throw ] unless* ] with-global-lexer
+ ast>> ; inline
+
+: create-bnf ( name parser -- )
+ reset-tokenizer [ lexer get skip-blank parse* dup ignore? [ drop ] [ parsed ] if ] curry
+ define-syntax word make-inline ;
+
+SYNTAX: ON-BNF:
+ CREATE-WORD reset-tokenizer ";ON-BNF" parse-multiline-string parse-ebnf
+ main swap at create-bnf ;
+
+! Tokenizer like standard factor lexer
+EBNF: factor
+space = " " | "\n" | "\t"
+spaces = space* => [[ drop ignore ]]
+chunk = (!(space) .)+ => [[ >string ]]
+expr = spaces chunk
+;EBNF
--- /dev/null
+Use peg to write parsing words
--- /dev/null
+extensions
+reflection
--- /dev/null
+USING: peg-lexer math.parser strings ;
+IN: peg-lexer.test-parsers
+
+ON-BNF: test1
+ num = [1-4]* => [[ >string ]]
+ expr = num ( "-end" | "-done" )
+;ON-BNF
+
+ON-BNF: test2
+ num = [1-4]* => [[ >string string>number ]]
+ expr= num [5-9]
+;ON-BNF
+
+ON-BNF: test3
+ tokenizer = <foreign factor>
+ expr= "heavy" "duty" "testing"
+;ON-BNF
\ No newline at end of file