Merge branch 'test' of github.com:urbit/urbit into test

Galen Wolfe-Pauly 2015-09-29 17:18:25 -07:00
commit 5e07d3d703
100 changed files with 1977 additions and 808 deletions

View File

@ -10,7 +10,7 @@
!: :: ::
=> |% :: external structures
++ house :: all state
$: %1
$: %2
hoc=(map bone session) :: conversations
== ::
++ session :: per conversation
@ -27,6 +27,7 @@
[%pill p=path q=dojo-source] :: noun to unix pill
:: [%tree p=path q=dojo-source] :: noun to unix tree
[%file p=beam q=dojo-source] :: save to clay
[%http p=?(%post %put) q=purl r=dojo-source] :: http outbound
[%poke p=goal q=dojo-source] :: make and poke
[%show p=dojo-source] :: print
[%verb p=term q=(unit dojo-source)] :: store variable
@ -36,20 +37,14 @@
q=dojo-build :: general build
== ::
++ dojo-build :: one ford step
$% [%ec p=mark q=twig] :: caged expression
[%ex p=twig] :: hoon expression
[%di p=dojo-model] :: dialog
[%dv p=path] :: gate from source
[%fi p=dojo-filter q=dojo-source] :: filter
$% [%ur p=purl] :: http GET request
[%ge p=dojo-model] :: generator
[%sc p=dojo-model] :: script
[%dv p=path] :: core from source
[%ex p=twig] :: hoon expression
[%as p=mark q=dojo-source] :: simple transmute
[%do p=twig q=dojo-source] :: gate apply
[%tu p=(list dojo-source)] :: tuple
[%ur p=purl] :: http GET request
== ::
++ dojo-filter :: pipeline filter
$| mark :: simple transmute
twig :: function gate
::
++ dojo-model :: data construction
$: p=dojo-server :: core source
q=dojo-config :: configuration
@ -76,7 +71,7 @@
++ goal ,[p=ship q=term] :: flat application
++ clap :: action, user
$% [%peer p=path] :: subscribe
[%poke p=term q=*] :: apply
[%poke p=(cask)] :: apply
[%pull ~] :: unsubscribe
== ::
++ club :: action, system
@ -87,7 +82,7 @@
++ card :: general card
$% [%diff %sole-effect sole-effect] ::
[%send wire [ship term] clap] ::
[%hiss wire mark [%purl purl]] ::
[%hiss wire mark [%hiss hiss]] ::
[%exec wire @p (unit ,[beak silk])] ::
[%deal wire sock term club] ::
[%info wire @p toro] ::
@ -105,109 +100,110 @@
house :: program state
== ::
++ he :: per session
|_ [[ost=bone moz=(list move)] session] ::
|_ [moz=(list move) session] ::
++ dp :: dojo parser
|%
++ dp-default-app %hood
++ dp-specify
|= [gol=goal mod=dojo-model]
^- (pair goal dojo-source)
[gol [0 [%ge mod(q.p [q.gol q.p.mod])]]]
::
++ dp-command-line ;~(sfix dp-command (just '\0a'))
++ dp-command :: ++dojo-command
%+ knee *dojo-command |. ~+
;~ pose
;~ pfix bar
%+ cook
|= [a=path b=dojo-config]
^- dojo-command
[%poke [our.hid %hood] [0 %ge [0 [%cat %hood a]] b]]
;~(plug (most fas sym) dp-config)
;~ plug (cold %poke bar)
%+ cook dp-specify
(stag [our.hid dp-default-app] dp-model)
==
::
;~ plug (cold %poke col)
%+ cook
|= [a=goal b=(each dojo-source (trel term path dojo-config))]
|= [a=goal b=$&(dojo-model dojo-source)]
^- (pair goal dojo-source)
:- a
?- -.b
%& p.b
%| ?+ p.p.b !!
%di [0 %di [0 [%dog q.a q.p.b]] r.p.b]
%ge [0 %ge [0 [%cat q.a q.p.b]] r.p.b]
%sc [0 %sc [0 [%pig q.a q.p.b]] r.p.b]
==
==
?@ -.b [a b]
(dp-specify a b)
;~ plug
dp-goal
%+ pick ;~(pfix ace dp-source)
;~ plug
;~ pose
(cold %di wut)
(cold %ge lus)
(cold %sc pam)
==
(most fas sym)
dp-config
;~ pose
;~(pfix bar dp-model)
;~(pfix ace dp-source)
==
==
==
::
;~ plug (cold %verb tis)
;~(plug sym (punt ;~(pfix ace dp-source)))
==
::
;~ pfix fas
%+ cook
|=(a=(list twig) `dojo-command`[%verb %dir ~ [0 %ex %clsg a]])
dp-poor
==
::
;~ plug (cold %file tar)
;~((glue ace) dp-beam dp-source)
==
::
;~ plug (cold %flat pat)
;~((glue ace) (most fas sym) dp-source)
==
::
;~ plug (cold %pill dot)
;~((glue ace) (most fas sym) dp-source)
;~(plug (cold %file tar) dp-beam ;~(pfix ace dp-source))
;~(plug (cold %flat pat) (most fas sym) ;~(pfix ace dp-source))
;~(plug (cold %pill dot) (most fas sym) ;~(pfix ace dp-source))
;~(plug (cold %http lus) (easy %post) auri:epur ;~(pfix ace dp-source))
;~(plug (cold %http hep) (easy %put) auri:epur ;~(pfix ace dp-source))
;~(plug (cold %verb tis) sym (punt ;~(pfix ace dp-source)))
;~ plug (cold %verb fas)
;~ pose
;~(plug (cold %arc hep) (punt ;~(pfix gap dp-hooves)))
;~(plug (cold %lib lus) (punt ;~(pfix gap dp-hooves)))
(stag %dir :(stag ~ 0 %ex %clsg dp-poor))
==
==
::
(stag %show dp-source)
==
++ dp-hooves :: hoof list
:(stag 0 %ex %clsg (cook |=(a=tusk a) (most ;~(plug com gaw) dp-hoof)))
::
++ dp-hoof :: ++ford-hoof twig
%+ cook |*(a=* ~!(+.a `twig`a))
;~ plug
:(stag %dtzy %tas sym)
%- dp-twig-punt
;~ (glue fas)
;~(pfix fas (sear dp-case-twig nuck:so))
(stag %dtzy ;~(plug (cold %p sig) fed:ag))
==
==
::
++ dp-twig-punt :: twig of unit
|*(a=_rule ;~(pose (stag [%bczp %null] a) (easy [%bczp %null])))
::
++ dp-case-twig
|= a=coin ^- (unit twig)
?. ?=([~ case] a) ~
%+ some
[%dtzz %tas p.p.a]
[%dtzy p.a]
::
++ dp-source (stag 0 dp-build) :: ++dojo-source
++ dp-build :: ++dojo-build
%+ knee *dojo-build |. ~+
;~ pose
;~(pfix lus ;~(pose (stag %ur auri:epur) (stag %ge dp-model-cat)))
;~(plug (cold %di wut) dp-model-dog)
;~(plug (cold %fi cab) ;~((glue ace) dp-filter dp-source))
;~(plug (cold %ur lus) auri:epur)
;~(plug (cold %ge lus) dp-model)
;~(plug (cold %as pam) sym ;~(pfix ace dp-source))
;~(plug (cold %do cab) dp-twig ;~(pfix ace dp-source))
dp-value
==
::
++ dp-filter ;~(pose ;~(sfix sym cab) dp-twig) :: ++dojo-filter
++ dp-goal :: ++goal
%+ cook |=(a=goal a)
;~ pose
;~ plug
;~(pfix sig fed:ag)
;~(pfix fas sym)
;~(pfix sig fed:ag)
;~(pose ;~(pfix fas sym) (easy dp-default-app))
==
(cook |=(a=term `goal`[our.hid a]) sym)
(easy [our.hid %hood])
%+ stag our.hid
;~(pose sym (easy dp-default-app))
==
++ dp-beam :: ++beam
%+ sear tome
%+ cook |=(a=path =+((tome a) ?^(- u [he-beak (flop a)])))
=+ vez=(vang & dp-path)
(sear plex:vez (stag %clsg poor:vez))
::
++ dp-model-cat ;~(plug dp-server-cat dp-config) :: ++dojo-model
++ dp-model-dog ;~(plug dp-server-dog dp-config) :: ++dojo-model
++ dp-model-pig ;~(plug dp-server-pig dp-config) :: ++dojo-model
++ dp-path (tope he-beam) :: ++path
++ dp-server-cat (stag 0 (stag %cat dp-device)) :: ++dojo-server
++ dp-server-dog (stag 0 (stag %dog dp-device)) :: ++dojo-server
++ dp-server-pig (stag 0 (stag %pig dp-device)) :: ++dojo-server
++ dp-twig tall:(vang & dp-path) :: ++twig
++ dp-poor poor:(vang & (tope dir)) :: (list ++twig)
++ dp-device (most fas sym) :: ++dojo-device
++ dp-model ;~(plug dp-server dp-config) :: ++dojo-model
++ dp-path (tope he-beam) :: ++path
++ dp-server (stag 0 (most fas sym)) :: ++dojo-server
++ dp-twig tall:(vang & dp-path) :: ++twig
++ dp-poor poor:(vang & (tope dir)) :: (list ++twig)
++ dp-value :: ++dojo-source
;~ pose
(stag %tu (ifix [kel ker] (most ace dp-source)))
@ -238,10 +234,10 @@
(he-card(poy `+>+<(pux `way)) %exec way our.hid `[he-beak kas])
::
++ dy-eyre :: send work to eyre
|= [way=wire req=[%purl purl]]
|= [way=wire req=hiss]
^+ +>+>
?> ?=(~ pux)
(he-card(poy `+>+<(pux `way)) %hiss way %httr req)
(he-card(poy `+>+<(pux `way)) %hiss way %httr %hiss req)
::
++ dy-stop :: stop work
^+ +>
@ -267,12 +263,16 @@
|= mad=dojo-command
^+ [mad +>]
?- -.mad
%file =^(src +>.$ (dy-init-source q.mad) [[%file p.mad src] +>.$])
%flat =^(src +>.$ (dy-init-source q.mad) [[%flat p.mad src] +>.$])
%pill =^(src +>.$ (dy-init-source q.mad) [[%pill p.mad src] +>.$])
%poke =^(src +>.$ (dy-init-source q.mad) [[%poke p.mad src] +>.$])
%show =^(src +>.$ (dy-init-source p.mad) [[%show src] +>.$])
%verb =^(src +>.$ (dy-init-source-unit q.mad) [[%verb p.mad src] +>.$])
%file =^(src +>.$ (dy-init-source q.mad) [mad(q src) +>.$])
%flat =^(src +>.$ (dy-init-source q.mad) [mad(q src) +>.$])
%pill =^(src +>.$ (dy-init-source q.mad) [mad(q src) +>.$])
%poke =^(src +>.$ (dy-init-source q.mad) [mad(q src) +>.$])
%show =^(src +>.$ (dy-init-source p.mad) [mad(p src) +>.$])
%verb =^(src +>.$ (dy-init-source-unit q.mad) [mad(q src) +>.$])
%http
=. r.mad [0 %as %mime r.mad]
=^ src +>.$ (dy-init-source r.mad)
[mad(r src) +>.$]
==
::
++ dy-init-source-unit :: (unit dojo-source)
@ -295,14 +295,11 @@
|= bul=dojo-build
^+ [bul +>]
?- -.bul
%ec [bul +>.$]
%ex [bul +>.$]
%di =^(mod +>.$ (dy-init-model p.bul) [[%di mod] +>.$])
%dv [bul +>.$]
%fi =^ mor +>.$ (dy-init-source q.bul)
[bul(q mor) +>.$]
%as =^(mor +>.$ (dy-init-source q.bul) [bul(q mor) +>.$])
%do =^(mor +>.$ (dy-init-source q.bul) [bul(q mor) +>.$])
%ge =^(mod +>.$ (dy-init-model p.bul) [[%ge mod] +>.$])
%sc !!
%ur [bul +>.$]
%tu =^ dof +>.$
|- ^+ [p.bul +>.^$]
@ -323,7 +320,7 @@
++ dy-init-server :: ++dojo-server
|= srv=dojo-server
=. p.srv num
[srv +>.$(num +(num), job (~(put by job) num [%dv q.srv]))]
[srv +>.$(num +(num), job (~(put by job) num [%dv [%gen q.srv]]))]
::
++ dy-init-config :: prepare config
|= cig=dojo-config
@ -380,7 +377,7 @@
::
++ dy-done :: dialog submit
|= txt=tape
?. ?=(^ pro)
?: |(?=(^ per) ?=(^ pux) ?=(~ pro))
~& %dy-no-prompt
(dy-diff %bel ~)
(dy-slam /dial u.pro !>(txt))
@ -445,20 +442,30 @@
=- +(..dy (he-diff %tan - ~))
rose/[" " `~]^~[leaf/"=%" (smyt (tope he-beak s.dir))]
==
::
%http
=+ cay=(~(got by rez) p.r.mad)
?> ?=(%mime p.cay)
=+ mim=;;(mime q.q.cay)
=+ maf=(~(add ja *math) content-type/(moon p.mim))
(dy-eyre /show [q.mad p.mad maf ~ q.mim])
::
%show
=+ cay=(~(got by rez) p.p.mad)
%+ dy-rash %tan
?+ p.cay [(sell q.cay)]~
%tang ;;(tang q.q.cay)
%httr
=+ hit=;;(httr q.q.cay)
=- (flop (turn `wall`- |=(a=tape leaf/(dash:ut a ''))))
:- "HTTP {<p.hit>}"
%+ weld
(turn q.hit |=([a=@t b=@t] "{(trip a)}: {(trip b)}"))
(turn `wain`?~(r.hit ~ (lore q.u.r.hit)) trip)
==
(dy-show (~(got by rez) p.p.mad))
==
::
++ dy-show
|= cay=cage
%+ dy-rash %tan
?+ p.cay [(sell q.cay)]~
%tang ;;(tang q.q.cay)
%httr
=+ hit=;;(httr q.q.cay)
=- (flop (turn `wall`- |=(a=tape leaf/(dash:ut a ''))))
:- "HTTP {<p.hit>}"
%+ weld
(turn q.hit |=([a=@t b=@t] "{(trip a)}: {(trip b)}"))
(turn `wain`?~(r.hit ~ (lore q.u.r.hit)) trip)
==
::
++ dy-edit :: handle edit
@ -480,34 +487,33 @@
::
++ dy-cage |=(num=@ud (~(got by rez) num)) :: known cage
++ dy-vase |=(num=@ud q:(dy-cage num)) :: known vase
++ dy-silk-vase |=(vax=vase [%$ %noun vax]) :: vase to silk
++ dy-silk-config :: configure
|= [cag=cage cig=dojo-config]
^- silk
:+ %ride [%cnzy %$]
:+ %mute [%$ cag]
^- (list (pair wing silk))
:* :- [[~ 12] ~]
(dy-silk-vase !>([now=now.hid eny=eny.hid bec=he-beak]))
::
:- [[~ 26] ~]
%- dy-silk-vase
|- ^- vase
?~ p.cig !>(~)
(slop (dy-vase p.i.p.cig) $(p.cig t.p.cig))
::
%+ turn (~(tap by q.cig))
|= [a=term b=(unit dojo-source)]
^- (pair wing silk)
:- [a [~ 27] ~]
%- dy-silk-vase
?~(b !>([~ ~]) (dy-vase p.u.b))
==
++ dy-silk-vase |=(vax=vase [%$ %noun vax]) :: vase to silk
++ dy-silk-sources :: sources to silk
|= src=(list dojo-source) ^- silk
%- dy-silk-vase
|-
?~ src !>(~)
(slop (dy-vase p.i.src) $(src t.src))
::
++ dy-silk-init-modo :: init and config
|= [cag=cage cig=dojo-config]
^- silk
(dy-silk-config cag cig)
++ dy-silk-config :: configure
|= [cay=cage cig=dojo-config]
^- [wire silk]
:- ?+ -.q.q.cay ~|(%bad-gen ~_((sell (slot 2 q.cay)) !!))
%say /gent
%ask /dial
%get /scar
==
=+ gat=(slot 3 q.cay)
:+ %call (dy-silk-vase gat)
:+ (dy-silk-vase !>([now=now.hid eny=eny.hid bec=he-beak]))
(dy-silk-sources p.cig)
:+ %mute (dy-silk-vase (fall (slew 27 gat) !>(~)))
%+ turn (~(tap by q.cig))
|= [a=term b=(unit dojo-source)]
^- (pair wing silk)
:- [a ~]
%- dy-silk-vase
?~(b !>([~ ~]) (dy-vase p.u.b))
::
++ dy-twig-head :: dynamic state
^- cage
@ -517,7 +523,7 @@
%- ~(rep by var)
|= [[a=term @ b=vase] c=vase] ^- vase
(sloop b(p face/[a p.b]) c)
!>(`[our=@p now=@da eny=@uvI]`[our now eny]:hid)
!>([our=our now=now eny=eny]:hid)
::
++ dy-made-dial :: dialog product
|= cag=cage
@ -541,6 +547,35 @@
[%pro pom(cad [':' ' ' cad.pom])]
==
::
++ dy-made-scar :: scraper product
|= cag=cage
^+ +>+>
?. ?=(^ q.q.cag)
(dy-diff %err q.q.cag)
=+ tan=((list tank) +2.q.q.cag)
=. +>+>.$ (he-diff %tan tan)
=+ vax=(spec (slot 3 q.cag))
~_ (sell q.cag)
?+ -.q.vax !!
%&
?~ +.q.vax
~& %dy-made-scar-abort
(dy-rash %bel ~)
(dy-meal (slot 7 vax))
::
%|
=+ hiz=;;(hiss +<.q.vax)
=. ..dy (he-diff %tan leaf/"< {(earn p.hiz)}" ~)
(dy-eyre(pro `(slap (slot 7 vax) cnzy/%q)) /scar hiz)
==
::
++ dy-sigh-scar :: scraper result
|= dat=cage
?~ pro
~& %dy-no-scraper
(dy-show dat)
(dy-slam(pux ~) /scar u.pro q.dat)
::
++ dy-made-gent :: generator product
|= cag=cage
(dy-meal q.cag)
@ -550,17 +585,15 @@
?> ?=(^ cud)
=+ bil=q.u.cud :: XX =*
?: ?=(%ur -.bil)
(dy-eyre /hand %purl p.bil)
(dy-eyre /hand [p.bil %get ~ ~])
%- dy-ford
^- (pair path silk)
?+ -.bil !!
%di [/dial (dy-silk-init-modo (dy-cage p.p.p.bil) q.p.bil)]
%ge [/gent (dy-silk-init-modo (dy-cage p.p.p.bil) q.p.bil)]
?- -.bil
%ge (dy-silk-config (dy-cage p.p.p.bil) q.p.bil)
%dv [/hand [%core he-beak (flop p.bil)]]
%ec [/hand [%cast p.bil (dy-mare q.bil)]]
%ex [/hand (dy-mare p.bil)]
%fi =+ dat=[%$ (dy-cage p.q.bil)]
[/hand ?@(p.bil [%cast p.bil dat] [%call (dy-mare p.bil) dat])]
%as [/hand [%cast p.bil [%$ (dy-cage p.q.bil)]]]
%do [/hand [%call (dy-mare p.bil) [%$ (dy-cage p.q.bil)]]]
%tu :- /hand
:- %$
:- %noun
@ -637,7 +670,11 @@
==
::
++ he-abet :: resolve
[(flop moz) %_(+> hoc (~(put by hoc) ost +<+))]
[(flop moz) %_(+> hoc (~(put by hoc) ost.hid +<+))]
::
++ he-abut :: discard
=> he-stop
[(flop moz) %_(+> hoc (~(del by hoc) ost.hid))]
::
++ he-beak -:he-beam
++ he-beam :: logical beam
@ -648,7 +685,7 @@
++ he-card :: emit gift
|= cad=card
^+ +>
%_(+> moz [[ost cad] moz])
%_(+> moz [[ost.hid cad] moz])
::
++ he-send
|= [way=wire him=ship dap=term cop=clap]
@ -694,18 +731,25 @@
[%hand ~] dy-hand:dye
[%dial ~] dy-made-dial:dye
[%gent ~] dy-made-gent:dye
[%scar ~] dy-made-scar:dye
[%edit ~] dy-made-edit:dye
==
%| (he-diff(poy ~) %tan p.reg)
%tabl !!
==
::
++ he-sigh :: result from ford
++ he-sigh :: result from eyre
|= [way=wire hit=httr]
^+ +>
?> ?=(^ poy)
?> ?=([%hand ~] way) :: XX options?
he-pine:(~(dy-hand dy u.poy(pux ~)) %httr !>(hit))
=< he-pine
%. [%httr !>(hit)]
=+ dye=~(. dy u.poy(pux ~))
?+ way !!
[%hand ~] dy-hand:dye
[%show ~] dy-show:dye
[%scar ~] dy-sigh-scar:dye
==
::
++ he-unto :: result from behn
|= [way=wire cit=cuft]
@ -789,21 +833,25 @@
--
::
++ prep
=+ sosh==+(*session ,[_say syd=desk * (unit) _|3.-])
=+ session-1==+(*session ,_-(poy *(unit)))
=+ session-0==+(*session-1 ,[_say syd=desk * _|2.-])
:: ,_`..prep
|= old=(unit ?(house [%0 p=(map bone sosh)])) ^+ [~ ..prep]
=+ hoze=$%([%0 p=(map bone session-0)] [%1 p=(map bone session-1)])
|= old=(unit ?(house hoze)) ^+ [~ ..prep]
?~ old `..prep
?: ?=(%1 -.u.old) `..prep(+<+ u.old)
=< ^$(u.old [%1 (~(run by p.u.old) .)])
|= sos=sosh ^- session
=- ~! [%*(. *session poy *(unit)) -] -
[-.sos [[our.hid syd.sos ud/0] /] ~ |4.sos]
?- -.u.old
%2 `..prep(+<+ u.old)
%1 `..prep(+<+ [%2 (~(run by p.u.old) |=(session-1 +<(poy ~)))])
%0 =< ^$(u.old [%1 (~(run by p.u.old) .)])
|= sos=session-0 ^- session-1
[-.sos [[our.hid syd.sos ud/0] /] |3.sos]
==
::
:: pattern: ++ foo |=(data he-abet:(~(he-foo he (~(got by hoc) ost)) data))
++ arm (arm-session (~(got by hoc) ost.hid))
++ arm (arm-session ~ (~(got by hoc) ost.hid))
++ arm-session
|= ses=session
=> ~(. he [ost.hid ~] ses)
|= [moz=(list move) ses=session]
=> ~(. he moz ses)
=- [wrap=- +]
|* he-arm=_he-type
|= _+<.he-arm ^- (quip move ..he)
@ -812,9 +860,12 @@
++ peer-sole
~? !=(src.hid our.hid) [%dojo-peer-stranger ost.hid src.hid]
:: ?> =(src.hid our.hid)
~? (~(has by hoc) ost.hid) [%dojo-peer-replaced ost.hid]
=^ moz .
?. (~(has by hoc) ost.hid) [~ .]
~& [%dojo-peer-replaced ost.hid]
~(he-abut he ~ (~(got by hoc) ost.hid))
=+ ses=%*(. *session -.dir [our.hid %home ud/0])
(wrap he-peer):(arm-session ses)
(wrap he-peer):(arm-session moz ses)
::
++ poke-sole-action
|= act=sole-action ~| poke/act %. act
@ -828,7 +879,6 @@
++ pull
|= [pax=path]
^- (quip move +>)
=^ moz +>
he-abet:~(he-stop he [[ost.hid ~] (~(got by hoc) ost.hid)])
=^ moz +> ~(he-abut he ~ (~(got by hoc) ost.hid))
[moz +>.$(hoc (~(del by hoc) ost.hid))]
--

View File

@ -56,6 +56,11 @@
~? ?=(^ saw) [%kiln-spam-lame u.saw]
[~ +>]
::
++ coup-kiln-reload ::
|= [way=wire saw=(unit tang)]
~? ?=(^ saw) [%kiln-reload-lame u.saw]
[~ +>]
::
++ coup-drum-phat (wrap take-coup-phat):from-drum
++ coup-helm-hi (wrap coup-hi):from-helm
++ diff-sole-effect-drum-phat (wrap diff-sole-effect-phat):from-drum
@ -72,40 +77,45 @@
++ from-helm (from-lib %helm [..$ ,_abet]:(helm))
++ from-kiln (from-lib %kiln [..$ ,_abet]:(kiln))
::
++ init-helm |=([way=wire *] [~ +>])
++ made-kiln (wrap take-made):from-kiln
++ mere-kiln (wrap take-mere):from-kiln
++ mere-kiln-sync (wrap take-mere-sync):from-kiln
++ note-helm (wrap take-note):from-helm
++ onto-drum (wrap take-onto):from-drum
++ peer-drum (wrap peer):from-drum
++ poke-dill-belt (wrap poke-dill-belt):from-drum
++ poke-drum-link (wrap poke-link):from-drum
++ poke-drum-start (wrap poke-start):from-drum
++ poke-helm-hi (wrap poke-hi):from-helm
++ poke-helm-init (wrap poke-init):from-helm
++ poke-helm-invite (wrap poke-invite):from-helm
++ poke-helm-mass (wrap poke-mass):from-helm
++ poke-helm-reload (wrap poke-reload):from-helm
++ poke-helm-reload-desk (wrap poke-reload-desk):from-helm
++ poke-helm-reset (wrap poke-reset):from-helm
++ poke-helm-send-hi (wrap poke-send-hi):from-helm
++ poke-helm-verb (wrap poke-verb):from-helm
++ poke-helm-begin (wrap poke-begin):from-helm
++ poke-hood-sync (wrap poke-sync):from-kiln
++ poke-kiln-cp (wrap poke-cp):from-kiln
++ poke-kiln-label (wrap poke-label):from-kiln
++ poke-kiln-merge (wrap poke-merge):from-kiln
++ poke-kiln-mount (wrap poke-mount):from-kiln
++ poke-kiln-mv (wrap poke-mv):from-kiln
++ poke-kiln-rm (wrap poke-rm):from-kiln
++ poke-kiln-schedule (wrap poke-schedule):from-kiln
++ poke-kiln-sync (wrap poke-sync):from-kiln
++ poke-kiln-unmount (wrap poke-unmount):from-kiln
++ poke-kiln-unsync (wrap poke-unsync):from-kiln
++ poke-will (wrap poke-will):from-helm
++ quit-drum-phat (wrap quit-phat):from-drum
++ reap-drum-phat (wrap reap-phat):from-drum
++ woot-helm (wrap take-woot):from-helm
++ writ-kiln-sync (wrap take-writ):from-kiln
++ init-helm |=([way=wire *] [~ +>])
++ made-kiln (wrap take-made):from-kiln
++ mere-kiln (wrap take-mere):from-kiln
++ mere-kiln-sync (wrap take-mere-sync):from-kiln
++ note-helm (wrap take-note):from-helm
++ onto-drum (wrap take-onto):from-drum
++ peer-drum (wrap peer):from-drum
++ poke-dill-belt (wrap poke-dill-belt):from-drum
++ poke-drum-link (wrap poke-link):from-drum
::++ poke-drum-exit (wrap poke-exit):from-drum
++ poke-drum-start (wrap poke-start):from-drum
++ poke-helm-hi (wrap poke-hi):from-helm
++ poke-helm-init (wrap poke-init):from-helm
++ poke-helm-invite (wrap poke-invite):from-helm
++ poke-helm-mass (wrap poke-mass):from-helm
++ poke-helm-reload (wrap poke-reload):from-helm
++ poke-helm-reload-desk (wrap poke-reload-desk):from-helm
++ poke-helm-reset (wrap poke-reset):from-helm
++ poke-helm-send-hi (wrap poke-send-hi):from-helm
++ poke-helm-verb (wrap poke-verb):from-helm
++ poke-helm-begin (wrap poke-begin):from-helm
++ poke-hood-sync (wrap poke-sync):from-kiln
++ poke-kiln-cp (wrap poke-cp):from-kiln
++ poke-kiln-label (wrap poke-label):from-kiln
++ poke-kiln-merge (wrap poke-merge):from-kiln
++ poke-kiln-cancel (wrap poke-cancel):from-kiln
++ poke-kiln-mount (wrap poke-mount):from-kiln
++ poke-kiln-mv (wrap poke-mv):from-kiln
++ poke-kiln-rm (wrap poke-rm):from-kiln
++ poke-kiln-schedule (wrap poke-schedule):from-kiln
++ poke-kiln-sync (wrap poke-sync):from-kiln
++ poke-kiln-start-autoload (wrap poke-start-autoload):from-kiln
++ poke-kiln-autoload (wrap poke-autoload):from-kiln
++ poke-kiln-unmount (wrap poke-unmount):from-kiln
++ poke-kiln-unsync (wrap poke-unsync):from-kiln
++ poke-will (wrap poke-will):from-helm
++ quit-drum-phat (wrap quit-phat):from-drum
++ reap-drum-phat (wrap reap-phat):from-drum
++ woot-helm (wrap take-woot):from-helm
++ writ-kiln-autoload (wrap take-writ-autoload):from-kiln
++ writ-kiln-sync (wrap take-writ-sync):from-kiln
--

View File

@ -1686,10 +1686,10 @@
(weld " " txt)
::
%app
"[{(trip p.sep)}]: {(trip q.sep)}"
(trim 62 "[{(trip p.sep)}]: {(trip q.sep)}")
::
%tax
" {(rend-work-duty p.sep)}"
(trim 62 " {(rend-work-duty p.sep)}")
==
--
::
@ -1791,14 +1791,19 @@
++ log-to-file
|= man=span
^- move
=+ paf=/(scot %p our.hid)/home/(scot %da now.hid)/talk/[man]/talk-telegrams
=+ ^- paf=path
=+ day=(year %*(. (yore now.hid) +.t +:*tarp))
%+ tope [our.hid %home da/now.hid]
/talk-telegrams/(scot %da day)/[man]/talk
=+ grams:(~(got by stories) man)
[ost.hid %info /jamfile our.hid (foal paf [%talk-telegrams !>(-)])]
::
++ poke-save
|= man=span
^- (quip move +>)
[[(log-to-file man) ~] +>.$]
=+ paf=/(scot %p our.hid)/home/(scot %da now.hid)/talk/[man]/talk-telegrams
=+ grams:(~(got by stories) man)
[[ost.hid %info /jamfile our.hid (foal paf [%talk-telegrams !>(-)])]~ +>.$]
::
++ poke-load
|= man=span
@ -1816,6 +1821,12 @@
:- [(log-to-file man) ~]
+>.$(log (~(put by log) man count:(~(got by stories) man)))
::
++ poke-unlog
|= man=span
^- (quip move +>)
:- ~
+>.$(log (~(del by log) man))
::
++ prep
|= [old=(unit house-any)]
^- (quip move +>)

View File

@ -24,7 +24,8 @@
::
|_ $: bowl
client
connected=_|
connected=_| :: subscribed to talk
count=@ud :: # messages from talk
unordered=(map ,[@uvH @u] (pair ship flesh:work-stuff:talk))
==
++ at
@ -82,7 +83,7 @@
|= up=update
^+ +>
?- -.up
%add ?>(?=(%comment +<.up) (send-change %add-comment +>.up))
%add ?>(?=(%comment +<.up) (send-change %add-comment our +>.up))
%doer
?- +<.up
%release (send-change %set-doer ~)
@ -114,6 +115,7 @@
%- unit
$: client
_|
@ud
(map ,[@uvH @u] (pair ship flesh:work-stuff:talk))
==
^- [(list move) _+>.$]
@ -124,7 +126,7 @@
?: connected
[~ .]
:_ .(connected %&) :_ ~
[ost %peer /peering [our %talk] /f/(main:talk our)/0]
[ost %peer /peering [our %talk] /f/(main:talk our)/(scot %ud count)]
::
++ process-duty
|= [when=@da her=ship from=(set station:talk) action=duty:work-stuff:talk]
@ -256,7 +258,7 @@
%set-done tax.u.tax(done ?.(don.meat.action ~ `when))
%add-comment
%= tax.u.tax
discussion [[when her com.meat.action] discussion.tax.u.tax]
discussion [[when [who com]:meat.action] discussion.tax.u.tax]
==
==
=+ ooo=(~(get by unordered) id.action +(version.action))
@ -309,28 +311,25 @@
abut:send-create:(at | +.cod)
==
::
:: XX maybe need to check that we haven't received this message before
:: by keeping a counter of last message received
:: XX definitely do this!
:: XX handle and test the disconnection case
::
:: XX test the disconnection case
++ diff-talk-report
|= [way=wire rep=report:talk]
^- [(list move) _+>.$]
?> ?=(%grams -.rep)
|- ^- [(list move) _+>.^$]
?~ q.rep [~ +>.^$]
=. count +(count)
=* her p.i.q.rep
=* when p.r.q.i.q.rep
=* said r.r.q.i.q.rep
?. ?=(%tax -.said)
$(p.rep +(p.rep), q.rep t.q.rep)
=+ ^- from=(set station:talk)
%- sa ^- (list station:talk)
%+ murn (~(tap by q.q.i.q.rep))
=> talk
|= [par=partner *]
`(unit station)`?.(?=(%& -.par) ~ `p.par)
?. ?=(%tax -.said)
$(p.rep +(p.rep), q.rep t.q.rep)
=^ mos +>.^$ (process-duty when her from +.said)
=^ mof +>.^$ $(p.rep +(p.rep), q.rep t.q.rep)
[(weld mos mof) +>.^$]

View File

@ -413,7 +413,7 @@
vix=(bex +((cut 0 [25 2] mag))) :: width of sender
tay=(cut 0 [27 5] mag) :: message type
==
?> =(4 vez)
?> =(5 vez)
?> =(chk (end 0 20 (mug bod)))
:+ [(end 3 wix bod) (cut 3 [wix vix] bod)]
(kins tay)
@ -433,7 +433,7 @@
=+ tay=(ksin q.kec)
%+ mix
%+ can 0
:~ [3 4]
:~ [3 5]
[20 (mug bod)]
[2 yax]
[2 qax]
@ -1018,7 +1018,7 @@
++ gnaw :: gnaw:am
|= [kay=cape ryn=lane pac=rock] :: process packet
^- [p=(list boon) q=fort]
?. =(4 (end 0 3 pac)) [~ fox]
?. =(5 (end 0 3 pac)) [~ fox]
=+ kec=(bite pac)
?: (goop p.p.kec) [~ fox]
?. (~(has by urb.ton.fox) q.p.kec)

File diff suppressed because it is too large

View File

@ -216,6 +216,7 @@
=. +> (sync %home our %base)
=. +> ?. ?=(?(%king %czar) can) +>
(sync %kids our %base)
=. +> autoload
=. +> peer
|- ^+ +>+
?~ myt +>+
@ -255,7 +256,18 @@
%_ +>.$
moz
:_ moz
[hen %pass ~ %g %deal [our our] ram %poke %hood-sync -:!>(syn) syn]
:* hen %pass /sync %g %deal [our our]
ram %poke %hood-sync -:!>(syn) syn
==
==
::
++ autoload
%_ .
moz
:_ moz
:* hen %pass /autoload %g %deal [our our]
ram %poke %kiln-start-autoload [%cube ~ [%atom %n]] ~
==
==
::
++ pump :: send diff ack

View File

@ -355,7 +355,7 @@
if(this.status !== 205) {
return urb.keep()
}
document.location.reload()
urb.onupdate()
})
urb.wreq.addEventListener('error', urb.keep)
urb.wreq.addEventListener('abort', urb.keep)
@ -365,6 +365,7 @@
setTimeout(urb.call,1000*urb.tries)
urb.tries++
}
urb.onupdate = function(){document.location.reload()}
urb.call()
urb.wasp = function(deh){
var old = /[^/]*$/.exec(urb.wurl)[0]
@ -430,13 +431,14 @@
urb.waspFrom = function(sel,attr){
Array.prototype.map.call(document.querySelectorAll(sel),
function(ele){
if(!ele[attr] || (new URL(ele[attr])).host != document.location.host) return;
if(!ele[attr] || (new URL(ele[attr])).host != document.location.host)
return;
var xhr = new XMLHttpRequest()
xhr.open("HEAD", ele[attr])
xhr.send()
xhr.onload = function(){
var tag = JSON.parse(this.getResponseHeader("etag"))
if(tag) urb.wasp(tag)
var dep = this.getResponseHeader("etag")
if(dep) urb.wasp(JSON.parse(dep.substr(2)))
}})}
if(urb.wasp){urb.waspFrom('script','src'); urb.waspFrom('link','href')}
'''
@ -770,7 +772,7 @@
==
~| q.q.cay
=+ ((hard ,[mit=mite rez=octs]) q.q.cay)
=+ dep=(crip (pojo %s (scot %uv p.sih)))
=+ dep=(crip "W/{(pojo %s (scot %uv p.sih))}")
(give-thou 200 ~[etag/dep content-type/(moon mit)] ~ rez)
==
==

View File

@ -993,9 +993,10 @@
(fine cof [%atom %t] u.mas)
%+ cope (fang cof for)
|= [cof=cafe tux=vase]
=+ bob=(slot 6 tux)
?: (~(nest ut p.bob) | p.sam)
(fine cof sam)
=+ typ=p:(slot 6 tux)
=. typ ?+(-.typ typ %face q.typ) :: XX vulcanization
?: (~(nest ut typ) | p.sam)
(fine cof typ q.sam)
?. (slob %grab p.tux)
(flaw cof [%leaf "ford: no grab: {<[for bek]>}"]~)
=+ gab=(slap tux [%cnzy %grab])
@ -1003,9 +1004,9 @@
(flaw cof [%leaf "ford: no noun: {<[for bek]>}"]~)
%+ cope (maul cof (slap gab [%cnzy %noun]) [%noun q.sam])
|= [cof=cafe pro=vase]
?> (~(nest ut p.bob) | p.pro)
?> (~(nest ut typ) | p.pro)
?: =(q.pro q.sam)
(fine cof p.bob q.pro)
(fine cof typ q.pro)
(flaw cof [%leaf "ford: invalid content: {<[for bek]>}"]~)
::
++ lamp :: normalize version
@ -1285,7 +1286,9 @@
(fine cof %& p.kas [%atom %t] '')
%+ cope (fang cof p.kas)
|= [cof=cafe tux=vase]
(fine cof [%& p.kas (slot 6 tux)])
=+ [typ=p val=q]:(slot 6 tux)
=. typ ?+(-.typ typ %face q.typ) :: XX vulcanization
(fine cof [%& p.kas [typ val]])
::
%call
:: %+ cool |.(leaf/"ford: call {<`@p`(mug kas)>}")
@ -1492,17 +1495,17 @@
|= [cof=cafe bax=vase doe=term hon=horn]
^- (bolt vase)
%+ cope (lash cof how (flux (slat doe)))
|= [cof=cafe yep=(map ,@ span)]
=+ ^= poy ^- (list (pair ,@ span))
|= [cof=cafe yep=(map span ,@)]
=+ ^= poy ^- (list (pair span ,@))
%+ sort (~(tap by yep) ~)
|=([a=[@ *] b=[@ *]] (lth -.a -.b))
|=([[* a=@] [* b=@]] (lth a b))
%+ cope
|- ^- (bolt (list (pair ,@ vase)))
?~ poy (flue cof)
%+ cope $(poy t.poy)
|= [cof=cafe nex=(list (pair ,@ vase))]
%+ cope (chap(s.how [q.i.poy s.how]) cof bax hon)
(flux |=(elt=vase [[p.i.poy elt] nex]))
%+ cope (chap(s.how [p.i.poy s.how]) cof bax hon)
(flux |=(elt=vase [[q.i.poy elt] nex]))
%- flux
|= yal=(list (pair ,@ vase)) ^- vase
?~ yal [[%cube 0 [%atom %n]] 0]

View File

@ -1122,6 +1122,7 @@
%deal `%g
%exec `%f
%flog `%d
%drop `%c
%info `%c
%merg `%c
%mont `%c

View File

@ -696,6 +696,7 @@
$(a t.a)
::
++ murn :: maybe transform
~/ %murn
|* [a=(list) b=$+(* (unit))]
|-
?~ a ~
@ -705,6 +706,7 @@
[i=u.c t=$(a t.a)]
::
++ reap :: replicate
~/ %reap
|* [a=@ b=*]
|- ^- (list ,_b)
?~ a ~
@ -727,6 +729,7 @@
$(a t.a, b b(+<+ (b i.a +<+.b)))
::
++ skid :: separate
~/ %skid
|* [a=(list) b=$+(* ?)]
|- ^+ [p=a q=a]
?~ a [~ ~]
@ -6585,6 +6588,7 @@
^- (list term)
?+ typ ~
[%hold *] $(typ ~(repo ut typ))
[%bull *] $(typ ~(repo ut typ))
[%core *]
(turn (~(tap by q.r.q.typ) ~) |=([a=term *] a))
==
@ -10553,7 +10557,7 @@
[@ %sync *] %c
[@ %term *] %d
[@ %http *] %e
[@ %temp *] %t
[@ %behn *] %b
==
::
++ doos :: sleep until

View File

@ -1836,7 +1836,6 @@
++ blob :: fs blob
$% [%delta p=lobe q=[p=mark q=lobe] r=page] :: delta on q
[%direct p=lobe q=page] :: immediate
[%indirect p=lobe q=page r=lobe s=page] :: both
== ::
++ boat (map (pair bone wire) (trel bean ship path)) :: outgoing subs
++ boon :: fort output
@ -1984,7 +1983,15 @@
== ::
++ gilt ,[@tas *] :: presumed gift
++ gens ,[p=lang q=gcos] :: general identity
++ germ ?(%init %fine %that %this %mate %meet %meld) :: merge style
++ germ :: merge style
$? %init :: new desk
%this :: ours with parents
%that :: hers with parents
%fine :: fast forward
%meet :: orthogonal files
%mate :: orthogonal changes
%meld :: force merge
== ::
++ gcos :: id description
$% [%czar ~] :: 8-bit ship
[%duke p=what] :: 32-bit ship
@ -2352,6 +2359,7 @@
== ::
++ kiss-clay :: in request ->$
$% [%boat ~] :: pier rebooted
[%drop p=@p q=@tas] :: cancel pending merge
[%info p=@p q=@tas r=nori] :: internal edit
[%init p=@p] :: report install
[%into p=@tas q=? r=mode] :: external edit

View File

@ -6,6 +6,7 @@
::
:::::::
!:
:- %say
|= [^ arg=[address=@t contact=@t ~] ~]
:- %address-contact
[address.arg contact.arg]

View File

@ -5,6 +5,7 @@
::
:::::
!:
:- %say
|= [^ [arg1=cord arg2=cord ~] ~] ::[arg=[cord cord ~] ~]]
:- %auth
[arg1 arg2]

View File

@ -5,6 +5,7 @@
::
:::::::
!:
:- %say
|= [^ [amount=@t currency=@t ~] ~]
:- %buy
[amount currency]

View File

@ -6,6 +6,7 @@
::
:::::::
!:
:- %say
|= [^ arg=[contact=@t address=@t ~] ~]
:- %bit-contact
[contact.arg address.arg]

View File

@ -5,6 +5,7 @@
::
:::::::
!:
:- %say
|= [^ ~ ~]
:- %list
~

View File

@ -5,6 +5,7 @@
::
:::::::
!:
:- %say
|= [^ [amount=@t currency=@t ~] ~]
:- %sell
[amount currency]

View File

@ -5,6 +5,7 @@
::
:::::::
!:
:- %say
|= [^ [to=@uc amo=@t ~] ~]
[%bit-send to amo]

View File

@ -1,12 +1,13 @@
:: ConCATenate file listings
::
:::: /hoon/cat/cat
:::: /hoon/gen/cat
::
// /%%/ls/subdir
// /%/pretty
!:
::::
::
:- %say
|= [^ [arg=(list path)] ~]
=- tang/(flop `tang`(zing -))
%+ turn arg

View File

@ -5,6 +5,7 @@
::
::::
!:
:- %say
|= [^ [arg=(list cord) ~]]
:- %keys
arg

6
gen/curl.hoon Normal file
View File

@ -0,0 +1,6 @@
/- sole
[sole]
:- %get |= [^ [a=tape ~] ~]
%+ sole-at (scan a auri:epur)
|= hit=httr
(sole-so %httr hit)

View File

@ -5,5 +5,6 @@
::
:::::
!:
:- %say
|= [^ [arg=tape ~] ~]
purl/(scan arg auri:epur)

View File

@ -1,10 +1,11 @@
::
:::: /hoon/hello/cat
:::: /hoon/hello/gen
::
/? 314
::
::::
!:
|= [* [[txt=@tas ~] ~]]
:- %say
|= [^ [[txt=@tas ~] ~]]
:- %noun
(crip (weld "hello, " (flop (trip txt))))

13
gen/hood/autoload.hoon Normal file
View File

@ -0,0 +1,13 @@
::
:::: /hoon/autoload/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[arg=?(~ [? ~]) ~]
==
:- %kiln-autoload
`(unit ,?)`?~(arg ~ `-.arg)

View File

@ -11,6 +11,7 @@
++ scug |*([a=@ b=(pole)] ?~(b ~ ?~(a ~ [-.b $(b +.b, a (dec a))])))
--
[sole .]
:- %ask
|= $: [now=@da eny=@uvI bec=beak]
[arg=_(scug *@ ~[his=*@p tic=*@p]) ~]
==

View File

@ -1,10 +1,11 @@
::
:::: /hoon/reload/hood/cat
:::: /hoon/reload/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[arg=(list term) ~]
==

View File

@ -1 +1,2 @@
:- %say
|=([^ [input=path output=path ~] ~] kiln-cp/[input output])

View File

@ -1 +1,2 @@
:- %say
|=([^ [who=ship mez=$|(~ [a=tape ~])] ~] helm-send-hi/[who ?~(mez ~ `a.mez)])

View File

@ -1,10 +1,11 @@
::
:::: /hoon/reload/hood/cat
:::: /hoon/reload/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[~ ~]
==

View File

@ -1,10 +1,11 @@
::
:::: /hoon/invite/hood/cat
:::: /hoon/invite/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[[who=@p myl=@t ~] ~]
==

View File

@ -1,10 +1,11 @@
::
:::: /hoon/label/hood/cat
:::: /hoon/label/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[arg=[syd=desk lab=@tas ~] ~]
==

View File

@ -1,10 +1,11 @@
::
:::: /hoon/link/hood/cat
:::: /hoon/link/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI byk=beak]
[arg=$?([dap=term ~] [who=ship dap=term ~]) ~]
==

View File

@ -1,10 +1,11 @@
::
:::: /hoon/mass/hood/cat
:::: /hoon/mass/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[arg=~ ~]
==

View File

@ -1,5 +1,5 @@
::
:::: /hoon/merge/hood/cat
:::: /hoon/merge/hood/gen
::
/? 314
!:
@ -10,6 +10,7 @@
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bek=beak]
[arg=[?(sorc [syd=$|(desk beaky) sorc])] gem=?(%auto germ)]
==

View File

@ -1,10 +1,11 @@
::
:::: /hoon/mount/hood/cat
:::: /hoon/mount/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[[pax=path pot=$|(~ [v=@tas ~])] ~]
==

View File

@ -1 +1,2 @@
:- %say
|=([^ [input=path output=path ~] ~] kiln-mv/[input output])

View File

@ -1,10 +1,11 @@
::
:::: /hoon/reload/hood/cat
:::: /hoon/reload/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[arg=~ ~]
==

View File

@ -1,10 +1,11 @@
::
:::: /hoon/reload/hood/cat
:::: /hoon/reload/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[arg=~ ~]
==

View File

@ -1,10 +1,11 @@
::
:::: /hoon/reload/hood/cat
:::: /hoon/reload/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[arg=[desk (list term)] ~]
==

View File

@ -1,10 +1,11 @@
::
:::: /hoon/reload/hood/cat
:::: /hoon/reload/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[arg=(list term) ~]
==

View File

@ -1,10 +1,11 @@
::
:::: /hoon/reset/hood/cat
:::: /hoon/reset/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[arg=~ ~]
==

View File

@ -1,10 +1,11 @@
::
:::: /hoon/reload/hood/cat
:::: /hoon/reload/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[arg=~ ~]
==

View File

@ -1 +1,2 @@
:- %say
|=([^ [input=path ~] ~] kiln-rm/input)

View File

@ -1 +1,2 @@
:- %say
|=([^ [where=path tym=@da eve=@t ~] ~] kiln-schedule/[where tym eve])

View File

@ -1,10 +1,11 @@
::
:::: /hoon/start/hood/cat
:::: /hoon/start/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[arg=[@ $|(~ [@ ~])] ~]
==

View File

@ -1,10 +1,11 @@
::
:::: /hoon/sync/hood/cat
:::: /hoon/sync/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[arg=[syd=@tas her=@p sud=@tas ~] ~]
==

View File

@ -1,10 +1,11 @@
::
:::: /hoon/unix/hood/cat
:::: /hoon/unix/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[[syd=@tas syn=?(~ [? ~])] ~]
==

View File

@ -1,10 +1,11 @@
::
:::: /hoon/mount/hood/cat
:::: /hoon/mount/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[[mon=$|(term [span path]) ~] ~]
==

View File

@ -1,10 +1,11 @@
::
:::: /hoon/unsync/hood/cat
:::: /hoon/unsync/hood/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[arg=[syd=@tas her=@p sud=@tas ~] ~]
==

View File

@ -7,6 +7,7 @@
::::
!:
[sole .]
:- %say
|= $: [now=@da eny=@ bec=beak]
[~ ~]
==

View File

@ -1,11 +1,12 @@
:: LiSt directory subnodes
::
:::: /hoon/ls/cat
:::: /hoon/ls/gen
::
// /%/subdir
!:
::::
::
:- %say
|= [^ [arg=path ~] ~]
=+ lon=((hard arch) .^(%cy arg))
tang/[?~(dir.lon leaf/"~" (subdir arg dir.lon))]~

2
gen/make.hoon Normal file
View File

@ -0,0 +1,2 @@
:- %say
|=([^ arg=(list ,@) foo=_`@`1 bar=_`@`2] noun/[arg foo bar])

24
gen/pope.hoon Normal file
View File

@ -0,0 +1,24 @@
:: Input twitter keys
/- sole
!:
=+ cryp=bruw :: XX change to ec2 ++brew eventually
=+ [sole]
:- %ask
|= $: [now=@da eny=@uvI bec=beak]
[[who=ship ~] ~]
==
^- (sole-result (cask tang))
%+ sole-yo leaf/"generating carrier {(scow %p who)} (#{(scow %ud who)})"
%+ sole-lo [%| %pope-pass "passphrase: "]
%+ sole-go (boss 256 (star prn))
|= fra=@t
%+ sole-lo [%| %pope-none "[press enter to compute]"] :: XX oy
%+ sole-go (easy ~)
|= ~
=+ bur=(shax (add who (shax fra)))
~& %computing-fingerprint
=+ arc=(cryp 2.048 bur)
%+ sole-so %tang
:~ leaf/"generator: {(scow %uw bur)}"
leaf/"fingerprint: {(scow %uw fig:ex:arc)}"
==

View File

@ -1,10 +1,11 @@
::
:::: /hoon/solid/cat
:::: /hoon/solid/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[~ ~]
==

View File

@ -1,10 +1,11 @@
::
:::: /hoon/ticket/cat
:::: /hoon/ticket/gen
::
/? 314
::
::::
!:
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[[her=@p ~] ~]
==

View File

@ -1,20 +1,21 @@
:: Tree view recursive directory contents
::
:::: /hoon/tree/cat
:::: /hoon/tree/gen
::
!:
::::
::
:- %say
|= [^ [pax=path fla=$|(~ [%full ~])] ~]
=+ len=(lent pax)
=+ rend=?^(fla dank:ut |=(a=path (dank:ut (slag len a))))
:- %tang %- flop
|- ^- tang
=+ ark=;;(arch .^(cy/pax))
=- ?~ q.ark -
=- ?~ fil.ark -
[(rend pax) -]
%- zing
%+ turn
(sort (~(tap by r.ark)) aor)
(sort (~(tap by dir.ark)) aor)
|= [a=@t ~]
^$(pax (welp pax /[a]))

View File

@ -3,6 +3,7 @@
::::
::
[twitter .]
:- %say
|= $: [now=@da eny=@uvI bec=beak]
[[who=span msg=cord ~] ~]
==

View File

@ -6,6 +6,7 @@
++ baz64 (cook crip (star alp))
--
!:
:- %ask
|= $: [now=@da eny=@uvI bec=beak]
[~ ~]
==

View File

@ -10,6 +10,7 @@
|_ [hide ~]
++ stat ,[id=@u who=@ta now=@da txt=@t]
++ rens
:- %say
|=(stat rose/[": " `~]^~[leaf/"{<now>} @{(trip who)}" leaf/(trip txt)])
++ peer ,_`.
++ poke--args

View File

@ -17,4 +17,3 @@
::
++ print sing
--
=*(down-jet . .)

View File

@ -215,6 +215,9 @@
=< se-abet =< se-view
(se-link gyl)
::
:: ++ poke-exit ::
:: |=(~ se-abet:(se-blit `dill-blit`[%qit ~])) :: XX find bone
:: ::
++ reap-phat ::
|= [way=wire saw=(unit tang)]
=< se-abet =< se-view
@ -841,4 +844,3 @@
?:(=(0 len) ~ [`@c`'*' $(len (dec len))])
--
--
=*(drum . .)

View File

@ -174,4 +174,3 @@
|= [way=wire her=ship cop=coop] =< abet
(emit %flog ~ %text "woot: {<[way cop]>}")
--
=*(helm . .)

View File

@ -29,4 +29,3 @@
[%get headers ~]
[%post headers ~ (tact (pojo p.req-type))]
--
=*(http . .)

View File

@ -10,6 +10,7 @@
++ kiln-pith ::
$: rem=(map desk kiln-desk) ::
syn=(map kiln-sync ,[let=@ud ust=bone]) ::
autoload=? ::
== ::
++ kiln-desk :: per-desk state
$: auto=? :: escalate on failure
@ -54,6 +55,7 @@
=> |% :: arvo structures
++ card ::
$% [%exec wire @p ~ [beak silk]] ::
[%drop wire @p @tas] ::
[%info wire @p @tas nori] ::
[%mont wire @tas @p @tas path] ::
[%ogre wire $|(@tas beam)] ::
@ -64,6 +66,8 @@
++ pear :: poke fruit
$% [%talk-command command:talk] ::
[%kiln-merge kiln-merge] ::
[%helm-reload (list term)] ::
[%helm-reset ~] ::
== ::
++ move (pair bone card) :: user-level move
--
@ -118,6 +122,10 @@
|= kiln-merge
abet:abet:(merge:(work syd) ali sud gim)
::
++ poke-cancel
|= syd=desk
abet:(emit %drop /cancel our syd)
::
++ do-info
|= [mez=tape tor=toro]
abet:(emit:(spam leaf/mez ~) %info /kiln our tor)
@ -148,6 +156,25 @@
=+ old=;;((map ,@da cord) (fall (file where) ~))
(foal where %sched !>((~(put by old) tym eve)))
::
++ poke-autoload
|= lod=(unit ,?)
?^ lod
abet(autoload u.lod)
=< abet(autoload !autoload)
(spam leaf/"turning autoload o{?:(autoload "ff" "n")}" ~)
::
++ poke-start-autoload
|= ~
=< abet
%- emil
%+ turn
`(list term)`~[%ames %behn %clay %dill %eyre %ford %gall %zuse %hoon]
|= syd=term
^- card
:* %warp /kiln/autoload/[syd] [our our] %home ~
%next %y da/now /arvo/[syd]/hoon
==
::
++ take |=(way=wire ?>(?=([@ ~] way) (work i.way))) :: general handler
++ take-mere ::
|= [way=wire are=(each (set path) (pair term tang))]
@ -171,7 +198,7 @@
==
abet:abet:(mere:(auto hos) mes)
::
++ take-writ ::
++ take-writ-sync ::
|= [way=wire rot=riot]
?> ?=([@ @ @ ~] way)
=+ ^- hos=kiln-sync
@ -181,6 +208,24 @@
==
abet:abet:(writ:(auto hos) rot)
::
++ take-writ-autoload
|= [way=wire rot=riot]
?> ?=([@ ~] way)
?> ?=(^ rot)
=+ syd=(slav %tas i.way)
=. +>.$
?. autoload
+>.$
?: ?=(%hoon syd)
(emit %poke /kiln/reload/[syd] [our %hood] %helm-reset ~)
(emit %poke /kiln/reload/[syd] [our %hood] %helm-reload ~[syd])
=. +>.$
%- emit :*
%warp /kiln/autoload/[syd] [our our] %home ~
%next %y da/now /arvo/[syd]/hoon
==
abet
::
++ spam
|= mes=(list tank)
((slog mes) ..spam)
@ -244,7 +289,7 @@
?+ p.p.mes
:* (render "sync failed" sud her syd)
leaf/"please manually merge the desks with"
leaf/":+merge %{(trip syd)} {(scow %p her)} %{(trip sud)}"
leaf/"|merge %{(trip syd)} {(scow %p her)} %{(trip sud)}"
leaf/""
leaf/"error code: {<p.p.mes>}"
q.p.mes
@ -419,7 +464,7 @@
=> =+ :- "%mate merge failed with conflicts,"
"setting up scratch space at %{(trip tic)}"
[tic=tic (spam leaf/-< leaf/-> q.p.are)]
(fancy-merge tic our syd %that)
(fancy-merge tic our syd %init)
==
::
++ tape-to-tanks
@ -457,7 +502,7 @@
"""
done setting up scratch space in {<[-]>}
please resolve the following conflicts and run
:+merge {<syd>} our {<[-]>}
|merge {<syd>} our {<[-]>}
"""
%^ tanks-if-any
"annotated conflicts in:" annotated
@ -483,4 +528,3 @@
==
--
--
=*(kiln . .)

View File

@ -41,4 +41,3 @@
++ voy ": {<~[(icon who)]>} (row/col): " :: print prompt
--
--
=*(oct1 . .)

View File

@ -57,4 +57,3 @@
?.(ept " ({-}'s turn) " ": {-} (row/col): ")::
--
--
=*(oct2 . .)

View File

@ -58,4 +58,3 @@
?.(ept " ({-}'s turn) " ": {-} (row/col): ")::
--
--
=*(oct3 . .)

View File

@ -58,4 +58,3 @@
?.(ept " ({-}'s turn) " ": {-} (row/col): ")::
--
--
=*(oct4 . .)

View File

@ -129,4 +129,3 @@
")"
==
--
=*(react . .)

View File

@ -105,4 +105,3 @@
^- [(list ,_mof) _con]
(ref +<)
--
=*(sh-utils . .)

View File

@ -139,4 +139,3 @@
?> ?=(%ins -.dat)
p.dat
--
=*(sole . .)

View File

@ -34,15 +34,26 @@
=+ work-stuff
|^ |= due=duty ^- tape
?- -.due
%archive =+(due " -{(tr-id id)} {(trip -.due)}.")
%create =+(due " -{(tr-id id.tax)} {(trip -.due)}: {<title.tax>}")
%change =+(due " -{(tr-id id)} {(trip -.due)}: {(tr-meat meat)}")
%archive =+(due " -{(tr-id id)} fin.")
%create =+(due " -{(tr-id id.tax)} new: {<title.tax>}")
%change =+(due " -{(tr-id id)} mod {(tr-meat meat)}")
%update
=+ due
" -{(tr-id id)} {(trip -.due)} {<version>} by {<her>}: {(tr-meat meat.due)}"
" -{(tr-id id)} v{<version>} by {<her>} {(tr-meat meat.due)}"
==
++ tr-id |=(a=@uv (scow %uv (rsh 2 25 a)))
++ tr-term |=(a=term (rash a (star ;~(pose (cold ' ' hep) next))))
++ tr-term
|= a=term
=+ ^- [typ=tape key=tape]
%+ rash a
;~ (glue hep)
(plus aln)
(plus ;~(pose (cold ' ' hep) next))
==
?+ (crip typ) "{typ} {key}"
%set ":{key}"
%add "+{key}"
==
++ tr-meat
|= feh=flesh ^- tape
?- -.feh
@ -52,8 +63,8 @@
%set-tags =+(feh "{(tr-term -.feh)} {<tag>}")
%set-title =+(feh "{(tr-term -.feh)} {<(trip til)>}")
%set-description =+(feh "{(tr-term -.feh)} {<(trip des)>}")
%add-comment =+(feh "{(tr-term -.feh)} {<(trip com)>}")
%add-comment =+ feh
"{(tr-term -.feh)} {(scow %p who)} {<(trip com)>}"
==
--
--
=*(talk . .)

View File

@ -62,4 +62,3 @@
acc(r $(acc r.acc))
--
--
=*(tree . .)

View File

@ -542,4 +542,3 @@
(mold %get /statuses/lookup ,[us ~])
--
--
=*(twitter . .)

View File

@ -7,7 +7,7 @@
|_ ato=@
++ grab |%
++ noun ,@
++ mime |=([^ p=octs] q.p)
++ mime |=([* p=octs] q.p)
--
++ grow |%
++ mime [/aplication/x-urb-unknown (taco ato)]

1
mar/helm/reload.hoon Normal file
View File

@ -0,0 +1 @@
|*(* ~)

View File

@ -3,19 +3,153 @@
::
/? 314
/- talk
/+ talk
!:
=+ talk
|_ gam=(list telegram)
::
++ grab-work-duty => [jo work-stuff]
|^ dute
++ as
:: |*(a=fist (cu sa (ar a))) :: XX types
|* a=fist
%- cu :_ (ar a)
~(gas in *(set ,_(need *a)))
++ ot
|* a=(pole ,[@tas fist])
|= b=json
%. ((^ot a) b)
%- slog
?+ b ~
[%o *]
%+ murn `(list ,[@tas fist])`a
|= [c=term d=fist] ^- (unit tank)
=+ (~(get by p.b) c)
?~ - (some >[c (turn (~(tap by p.b)) head)]<)
=+ (d u)
?~ - (some >[c u]<)
~
==
++ of
|* a=(pole ,[@tas fist])
|= b=json
%. ((of:jo a) b)
%- slog
?+ b ~
[%o *]
%+ murn `(list ,[@tas fist])`a
|= [c=term d=fist] ^- (unit tank)
=+ (~(get by p.b) c)
?~ - ~
=+ (d u)
?~ - (some >[c u]<)
~
==
++ id (ci (slat %uv) so)
++ ship (su fed:ag)
++ dute
%- of :~
create/task change/(ot id/id meat/uppd ~)
archive/id update/(ot id/id version/ni her/(su fed:ag) meat/uppd ~)
==
++ task
%- ot :~
id/id 'date_created'^di
version/ni 'date_modified'^di
creator/ship doer/(mu ship)
tags/(as so) 'date_due'^(mu di)
done/(mu di) title/so
description/so discussion/(ar (ot date/di ship/ship body/so ~))
==
++ audi (as stan)
++ stan (su ;~((glue fas) ;~(pfix sig fed:ag) urs:ab))
++ uppd
%- of :~
set-doer/(mu (su fed:ag))
set-date-due/(mu di)
set-tags/(as so)
set-title/so
set-description/so
set-done/bo
add-comment/(ot ship/(su fed:ag) com/so ~)
==
--
++ grow-work-duty
=> work-stuff
=+ jope=|=(a=ship [%s (rsh 3 1 (scot %p a))])
=+ jove=|=(a=@uvI [%s (scot %uv a)])
=< |= duty
%+ joba +<-
?- +<-
%create (task tax)
%archive (jove id)
%change (jobe id/(jove id) meat/(flesh meat) ~)
%update
%- jobe :~
id/(jove id)
version/(jone version)
her/(jope her)
meat/(flesh meat)
==
==
|%
++ tags
|= a=(set ,@t)
[%a (turn (sort (~(tap in a)) aor) |=(b=cord s/b))]
::
++ task
|= ^task
%- jobe :~ id/[%s (scot %uv id)]
tags/(^tags tags)
doer/?~(doer ~ (jope u.doer))
title/[%s title]
creator/(jope creator)
version/(jone version)
'date_created'^(jode date-created)
'date_modified'^(jode date-modified)
description/[%s description]
=< discussion/[%a (turn discussion .)]
|=(comment (jobe date/(jode date) ship/(jope ship) body/[%s body] ~))
'date_due'^?~(date-due ~ (jode u.date-due))
done/?~(done ~ (jode u.done))
==
++ flesh
|= ^flesh
%+ joba +<-
?- +<-
%set-doer ?~(her ~ (jope u.her))
%set-date-due ?~(wen ~ (jode u.wen))
%set-tags (tags tag)
%set-title [%s til]
%set-description [%s des]
%set-done [%b don]
%add-comment (jobe ship/(jope who) com/[%s com] ~)
==
--
++ grab
|%
++ noun (list telegram)
++ mime |=(^mime (json (rash q.q apex:poja)))
++ json
=> [jo ..telegram]
=> [jo ..telegram dute=grab-work-duty]
|= a=json ^- (list telegram)
=- (need ((ar (ot ship/(su fed:ag) thought/thot ~)) a))
|%
++ of
|* a=(pole ,[@tas fist])
|= b=json
%. ((of:jo a) b)
%- slog
?+ b ~
[%o *]
%+ murn `(list ,[@tas fist])`a
|= [c=term d=fist] ^- (unit tank)
=+ (~(get by p.b) c)
?~ - ~
=+ (d u)
?~ - (some >[c u]<)
~
==
++ op :: parse keys of map
|* [fel=_rule wit=fist]
%+ cu mo
@ -30,14 +164,14 @@
%- cu :_ (ar a)
~(gas in *(set ,_(need *a)))
::
++ lake |*(a=_,* $+(json (unit a)))
++ peach
|* a=_[rule rule]
|= tub=nail
^- (like (each ,_(wonk (-.a)) ,_(wonk (+.a))))
%. tub
;~(pose (stag %& -.a) (stag %| +.a))
++ ke :: callbacks
|* [gar=* sef=_|.(fist)]
|= jon=json
^- (unit ,_gar)
=- ~! gar ~! (need -) -
((sef) jon)
::
++ lake |*(a=_,* $+(json (unit a)))
++ head-rush
|* a=_rule
|* [b=cord c=*]
@ -61,28 +195,34 @@
::
++ parn
^- $+(nail (like partner))
%+ peach
%+ pick
;~((glue fas) ;~(pfix sig fed:ag) urs:ab)
%+ sear (soft passport)
;~((glue fas) sym urs:ab) :: XX [a-z0-9_]{1,15}
::
++ eval
|= a=(trel ,@da bouquet ?(speech [%eval p=@t])) ^- statement
?. ?=(%eval -.r.a) a
=+ pax=[&1:% &2:% (scot %da p.a) |3:%]
=- a(r [%fat tank/- %exp p.r.a])
p:(mule |.([(sell (slap !>(..zuse) (rain pax p.r.a)))]~))
::
++ stam
^- $+(json (unit statement))
%+ cu eval
=- (ot date/di bouquet/(as (ar so)) speech/(of -) ~)
:~ lin/(ot say/bo txt/so ~)
url/(su aurf:urlp)
eval/so
:: exp/(cu |=(a=cord [a ~]) so)
:: inv/(ot ship/(su fed:ag) party/(su urs:ab) ~)
++ stam (ot date/di bouquet/(as (ar so)) speech/spec ~)
++ spec
%+ ke *speech |. ~+
%- of :~
lin/(ot say/bo txt/so ~)
url/(ot txt/(su aurf:urlp) ~)
exp/(ot txt/so ~)
tax/(ot xat/dute ~)
app/(ot txt/so src/so ~)
fat/(ot tor/tors taf/spec ~)
ext/(ot nom/so txe/blob ~)
non/ul
:: inv/(ot ship/(su fed:ag) party/(su urs:ab) ~)
==
++ tors
%+ ke *torso |. ~+
%- of :~
name/(ot nom/so mon/tors ~)
text/(cu lore so)
tank/(ot dat/(cu (hard (list tank)) blob) ~)
==
::
++ blob (cu cue (su fel:ofis))
--
--
::
@ -148,11 +288,13 @@
%+ joba -.a
?+ -.a ~|(stub/-.a !!)
%lin (jobe txt/[%s q.a] say/[%b p.a] ~)
%url (joba txt/[%s (crip (earf p.a))])
%url (joba txt/(jape (earf p.a)))
%exp (joba txt/[%s p.a])
%tax (joba txt/(jape <p.a>))
%tax (jobe txt/(jape (rend-work-duty p.a)) xat/(grow-work-duty p.a) ~)
%app (jobe txt/[%s q.a] src/[%s p.a] ~)
%fat (jobe tor/(tors p.a) taf/$(a q.a) ~)
%ext (jobe nom/[%s p.a] txe/(jape (sifo (jam +.a))) ~)
%non ~
:: %inv (jobe ship/(jope p.a) party/[%s q.a] ~)
==
::
@ -161,7 +303,7 @@
%+ joba -.a
?- -.a
%text [%s (role +.a)]
%tank [%a (turn +.a joke)]
%tank (jobe txt/[%a (turn +.a joke)] dat/(jape (sifo (jam +.a))) ~)
%name (jobe nom/s/p.a mon/$(a q.a) ~)
==
::

11
mar/unlog.hoon Normal file
View File

@ -0,0 +1,11 @@
::
:::: /hoon/core/unlog/mar
::
!:
|_ man=span
++ grab
|% :: convert from
++ noun span :: clam from %noun
--
--

View File

@ -0,0 +1,415 @@
# clay
## high-level
clay is the primary filesystem for the arvo operating system,
which is the core of an urbit. The architecture of clay is
intrinsically connected with arvo, but we assume no knowledge of
either arvo or urbit. We will point out only those features of
arvo that are necessary for an understanding of clay, and we will
do so only when they arise.
The first relevant feature of arvo is that it is a deterministic
system where input and output are defined as a series of events
and effects. The state of arvo is simply a function of its event
log. None of the effects from an event are emitted until the
event is entered in the log and persisted, either to disk or
another trusted source of persistence, such as a Kafka cluster.
Consequently, arvo is a single-level store: everything in its
state is persistent.
In a more traditional OS, everything in RAM can be erased at any
time by power failure, and is always erased on reboot. Thus, a
primary purpose of a filesystem is to ensure files persist across
power failures and reboots. In arvo, both power failures and
reboots are special cases of suspending computation, which is
done safely since our event log is already persistent. Therefore,
clay is not needed in arvo for persistence. Why, then, do we have a
filesystem? There are two answers to this question.
First, clay provides a filesystem tree, which is a convenient
user interface for some applications. Unix has the useful concept
of virtual filesystems, which are used for everything from direct
access to devices, to random number generators, to the /proc
tree. It is easy and intuitive to read from and write to a
filesystem tree.
Second, clay has a distributed revision control system baked into
it. Traditional filesystems are not revision controlled, so
userspace software -- such as git -- is written on top of them to
do so. clay natively provides the same functionality as modern
DVCSes, and more.
clay has two other unique properties that we'll cover later on:
it supports typed data and is referentially transparent.
### Revision Control
Every urbit has one or more "desks", which are independently
revision-controlled branches. Each desk contains its own mark
definitions, apps, doc, and so forth.
Traditionally, an urbit has at least a base and a home desk. The
base desk has all the system software from the distribution. the
home desk is a fork of base with all the stuff specific to the
user of the urbit.
A desk is a series of numbered commits, the most recent of which
represents the current state of the desk. A commit is composed of
(1) an absolute time when it was created, (2) a list of zero or
more parents, and (3) a map from paths to data.
Most commits have exactly one parent, but the initial commit on a
desk may have zero parents, and merge commits have more than one
parent.
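
Schematically, and with the caveat that the face names below are illustrative rather than clay's actual type definitions, a commit can be pictured as:

    ++  commit                           ::  sketch only, not the real clay type
      $:  date=@da                       ::  (1) absolute time of creation
          parents=(list ,@)              ::  (2) zero or more parent commits (by hash)
          content=(map path lobe)        ::  (3) paths to hashes of data (see below)
      ==
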
The non-meta data is stored in the map of paths to data. It's
worth noting that no constraints are put on this map, so, for
example, both /a/b and /a/b/c could have data. This is impossible
in a traditional Unix filesystem since it means that /a/b is both
a file and a directory. Conventionally, the final element in the
path is its mark -- much like a filename extension in Unix. Thus,
/doc/readme.md in Unix is stored as /doc/readme/md in urbit.
The data is not stored directly in the map; rather, a hash of the
data is stored, and we maintain a master blob store. Thus, if the
same data is referred to in multiple commits (as, for example,
when a file doesn't change between commits), only the hash is
duplicated.
In the master blob store, we either store the data directly, or
else we store a diff against another blob. The hash is dependent
only on the data within and not on whether or not it's stored
directly, so we may on occasion rearrange the contents of the
blob store for performance reasons.
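
These two representations correspond to the `%direct` and `%delta` cases of the `++blob` union that appears earlier in this diff:

    ++  blob                                          ::  fs blob
      $%  [%delta p=lobe q=[p=mark q=lobe] r=page]    ::  delta on q
          [%direct p=lobe q=page]                     ::  immediate
      ==                                              ::
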
Recall that a desk is a series of numbered commits. Not every
commit in a desk must be numbered. For example, if the base desk
has had 50 commits since home was forked from it, then a merge
from base to home will only add a single revision number to home,
although the full commit history will be accessible by traversing
the parentage of the individual commits.
We do guarantee that the first commit is numbered 1, commits are
numbered consecutively after that (i.e. there are no "holes"),
the topmost commit is always numbered, and every numbered commit
is an ancestor of every later numbered commit.
There are three ways to refer to particular commits in the
revision history. Firstly, one can use the revision number.
Secondly, one can use any absolute time between the one numbered
commit and the next (inclusive of the first, exclusive of the
second). Thirdly, every desk has a map of labels to revision
numbers. These labels may be used to refer to specific commits.
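
For example, the same commit on a hypothetical `%home` desk could be named in all three ways (the label and date here are made up):

    /~sampel-sipnym/home/5              ::  by revision number
    /~sampel-sipnym/home/~2015.9.29     ::  by a date within revision 5's span
    /~sampel-sipnym/home/release        ::  by a label bound to revision 5
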
Additionally, clay is a global filesystem, so data on other urbits
is easily accessible in the same way as data on our local urbit. In
general, the path to a particular revision of a desk is
/~urbit-name/desk-name/revision. Thus, to get /try/readme/md
from revision 5 of the home desk on ~sampel-sipnym, we refer to
/~sampel-sipnym/home/5/try/readme/md. Clay's namespace is thus
global and referentially transparent.
XXX reactivity here?
### A Typed Filesystem
Since clay is a general filesystem for storing data of arbitrary
types, in order to revision control correctly it needs to be
aware of types all the way through. Traditional revision control
does an excellent job of handling source code, so for source code
we act very similar to traditional revision control. The
challenge is to handle other data similarly well.
For example, modern VCSs generally support "binary files", which
are files for which the standard textual diffing, patching, and
merging algorithms are not helpful. A "diff" of two binary files
is just a pair of the files, "patching" this diff is just
replacing the old file with the new one, and "merging"
non-identical diffs is always a conflict, which can't even be
helpfully annotated. Without knowing anything about the structure
of a blob of data, this is the best we can do.
Often, though, "binary" files have some internal structure, and
it is possible to create diff, patch, and merge algorithms that
take advantage of this structure. An image may be the result of a
base image with some set of operations applied. With algorithms
aware of this set of operations, not only can revision control
software save space by not having to save every revision of the
image individually, these transformations can be made on parallel
branches and merged at will.
Suppose Alice is tasked with touching up a picture, improving the
color balance, adjusting the contrast, and so forth, while Bob
has the job of cropping the picture to fit where it's needed and
adding textual overlay. Without type-aware revision control,
these changes must be made serially, requiring Alice and Bob to
explicitly coordinate their efforts. With type-aware revision
control, these operations may be performed in parallel, and then
the two changesets can be merged programmatically.
Of course, even some kinds of text files may be better served by
diff, patch, and merge algorithms aware of the structure of the
files. Consider a file containing a pretty-printed JSON object.
Small changes in the JSON object may result in rather significant
changes in how the object is pretty-printed (for example, by
adding an indentation level or splitting a single line into
multiple lines).
A text file wrapped at 80 columns also reacts suboptimally with
unadorned Hunt-McIlroy diffs. A single word inserted in a
paragraph may push the final word or two of the line onto the
next line, and the entire rest of the paragraph may be flagged as
a change. Two diffs consisting of a single added word to
different sentences may be flagged as a conflict. In general,
prose should be diffed by sentence, not by line.
As far as the author is aware, clay is the first generalized,
type-aware revision control system. We'll go into the workings
of this system in some detail.
### Marks
Central to a typed filesystem is the idea of types. In clay, we
call these "marks". A mark is a file that defines a type,
conversion routines to and from the mark, and diff, patch, and
merge routines.
For example, a `%txt` mark may be a list of lines of text, and it
may include conversions to `%mime` to allow it to be serialized
and sent to a browser or to the unix filesystem. It will also
include Hunt-McIlroy diff, patch, and merge algorithms.
A `%json` mark would be defined as a json object in the code, and
it would have a parser to convert from `%txt` and a printer to
convert back to `%txt`. The diff, patch, and merge algorithms are
fairly straightforward for json, though they're very different
from the text ones.
More formally, a mark is a core with three arms, `++grab`,
`++grow`, and `++grad`. In `++grab` is a series of functions to
convert from other marks to the given mark. In `++grow` is a
series of functions to convert from the given mark to other
marks. In `++grad` are `++diff`, `++pact`, `++join`, and `++mash`.
The types are as follows, in an informal pseudocode:
++ grab:
++ mime: <mime> -> <mark-type>
++ txt: <txt> -> <mark-type>
...
++ grow:
++ mime: <mark-type> -> <mime>
++ txt: <mark-type> -> <txt>
...
++ grad:
++ diff: (<mark-type>, <mark-type>) -> <diff-type>
++ pact: (<mark-type>, <diff-type>) -> <mark-type>
++ join: (<diff-type>, <diff-type>) -> <diff-type> or NULL
++ mash: (<diff-type>, <diff-type>) -> <diff-type>
These types are basically what you would expect. Not every mark
has each of these functions defined -- all of them are optional
in the general case.
In general, for a particular mark, the `++grab` and `++grow` entries
(if they exist) should be inverses of each other.
In `++grad`, `++diff` takes two instances of a mark and produces
a diff of them. `++pact` takes an instance of a mark and patches
it with the given diff. `++join` takes two diffs and attempts to
merge them into a single diff. If there are conflicts, it
produces null. `++mash` takes two diffs and forces a merge,
annotating any conflicts.
In general, if `++diff` called with A and B produces diff D, then
`++pact` called with A and D should produce B. Also, if `++join`
of two diffs does not produce null, then `++mash` of the same
diffs should produce the same result.
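To make these laws concrete, here is a minimal sketch in Python (not
Hoon, and not clay's actual `%txt` implementation) of a line-based
mark's `++grad` arms; the diff representation is an invented
simplification used only to illustrate the properties above.

    # Toy line-based "grad" (Python, not Hoon): a file is a list of lines and
    # a diff is an invented structure {line-number: (old, new)}.  Only
    # same-length files are handled, which is enough to check the laws.

    def diff(a, b):
        """++diff: produce a diff taking file a to file b."""
        assert len(a) == len(b), "sketch only handles same-length files"
        return {i: (x, y) for i, (x, y) in enumerate(zip(a, b)) if x != y}

    def pact(a, d):
        """++pact: patch file a with diff d."""
        out = list(a)
        for i, (old, new) in d.items():
            assert out[i] == old, "diff does not apply to this file"
            out[i] = new
        return out

    def join(d1, d2):
        """++join: merge two diffs, or return None (null) on conflict."""
        merged = dict(d1)
        for i, change in d2.items():
            if i in merged and merged[i] != change:
                return None                      # both sides edited the same line
            merged[i] = change
        return merged

    def mash(d1, d2):
        """++mash: force the merge, annotating any conflicting lines."""
        merged = dict(d1)
        for i, (old, new) in d2.items():
            if i in merged and merged[i] != (old, new):
                merged[i] = (old, "<<< %s ||| %s >>>" % (merged[i][1], new))
            else:
                merged[i] = (old, new)
        return merged

    # Checking the laws on a tiny example:
    base, alice, bob = ["a", "b", "c"], ["a", "B", "c"], ["a", "b", "C"]
    da, db = diff(base, alice), diff(base, bob)
    assert pact(base, da) == alice                   # pact(A, diff(A, B)) == B
    assert join(da, db) is not None                  # disjoint edits merge cleanly
    assert pact(base, join(da, db)) == pact(base, mash(da, db))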
Alternately, instead of `++diff`, `++pact`, `++join`, and
`++mash`, a mark can provide the same functionality by defining
`++sted` to be the name of another mark to which we wish to
delegate the revision control responsibilities. Then, before
running any of those functions, clay will convert to the other
mark, and convert back afterward. For example, the `%hoon` mark
is revision-controlled in the same way as `%txt`, so its `++grad`
is simply `++sted %txt`. Of course, `++txt` must be defined in
`++grow` and `++grab` as well.
Every file in clay has a mark, and that mark must have a
fully-functioning `++grad`. Marks are used for more than just
clay, and other marks don't need a `++grad`, but if a piece of
data is to be saved to clay, we must know how to revision-control
it.
Additionally, if a file is to be synced out to unix, then it must
have conversion routines to and from the `%mime` mark.
## Using clay
### Reading and Subscribing
When reading from Clay, there are three types of requests. A
`%sing` request asks for data at a single revision. A `%next`
request asks to be notified the next time there's a change to a
given file. A `%many` request asks to be notified of every
change to a desk over a range of revisions.
For `%sing` and `%next`, there are generally three things to be
queried. A `%u` request simply checks for the existence of a
file at a path. A `%x` request gets the data in the file at a
path. A `%y` request gets a hash of the data in the file at the
path combined with all its children and their data. Thus, `%y`
of a node changes if it or any of its children change.
A `%sing` request is fulfilled immediately if possible. If the
requested revision is in the future, or is on another ship for
which we don't have the result cached, we don't respond
immediately. If the requested revision is in the future, we wait
until the revision happens before we respond to the request. If
the request is for data on another ship, we pass on the request
to the other ship. In general, Clay subscriptions, like most
things in Urbit, aren't guaranteed to return immediately.
They'll return when they can, and they'll do so in a
referentially transparent manner.
A `%next` request checks the query at the given revision, and it
produces the result of the query the next time it changes, along
with the revision number at which it changes. Thus, a `%next` of a
`%u` is triggered when a file is added or deleted, a `%next` of a
`%x` is triggered when a file is added, deleted, or changed, and
a `%next` of a `%y` is triggered when a file or any of its
children is added, deleted, or changed.
A `%many` request is triggered every time the given desk has a
new revision. Unlike a `%next`, a `%many` has both a start and
an end revision, after which it stops returning. For `%next`, a
single change is reported, and if the caller wishes to hear of
the next change, it must resubscribe. For `%many`, every revision
from the start to the end triggers a response. Since a `%many`
request doesn't ask for any particular data, there aren't `%u`,
`%x`, and `%y` versions for it.
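As a rough illustration of these request types, the following Python
sketch uses an invented in-memory history (none of these names or
structures are clay's) to show how `%u`, `%x`, and `%y` answers could
be computed at a revision, and how a `%next` is simply the answer at
the first later revision where it changes; `%many` is not modeled.

    import hashlib

    # Invented model: history[revision][path] = contents.
    history = {
        1: {"/doc/a": b"one"},
        2: {"/doc/a": b"one", "/doc/b": b"two"},
        3: {"/doc/a": b"ONE", "/doc/b": b"two"},
    }

    def query(care, rev, path):
        files = history[rev]
        if care == "u":                                   # does the file exist?
            return path in files
        if care == "x":                                   # the data at the path
            return files.get(path)
        if care == "y":                                   # hash of path and children
            h = hashlib.sha256()
            for p in sorted(files):
                if p == path or p.startswith(path + "/"):
                    h.update(p.encode() + files[p])
            return h.hexdigest()
        raise ValueError("unknown care: " + care)

    def next_change(care, rev, path):
        """%next: answer at the first revision after rev where the answer changes."""
        base = query(care, rev, path)
        for r in sorted(history):
            if r > rev and query(care, r, path) != base:
                return r, query(care, r, path)
        return None                                       # would block until a change

    print(next_change("x", 1, "/doc/a"))   # (3, b'ONE'): the file's data changed
    print(next_change("u", 1, "/doc/b"))   # (2, True): the file came into existence
    print(next_change("y", 1, "/doc"))     # (2, '...'): a child was added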
### Unix sync
One of the primary functions of clay is as a convenient user
interface. While tools exist to use clay from within urbit, it's
often useful to be able to treat clay like any other filesystem
from the Unix perspective -- to "mount" it, as it were.
From urbit, you can run `|mount /path/to/directory %mount-point`,
and this will mount the given clay directory to the mount-point
directory in Unix. Every file is converted to `%mime` before it's
written to Unix, and converted back when read from Unix. The
entire directory is watched (a la Dropbox), and every change is
auto-committed to clay.
### Merging
Merging is a fundamental operation for a distributed revision
control system. At their root, clay's merges are similar to
git's, but with some additions to accommodate typed data. There
are seven different merge strategies.
Throughout our discussion, we'll say that the merge is from
Alice's desk to Bob's. Recall that a commit is a date (for all
new commits this will be the current date), a list of parents,
and the data itself.
A `%init` merge should be used iff it's the first commit to a
desk. The head of Alice's desk is used as the number 1 commit to
Bob's desk. Obviously, the ancestry remains intact through
traversing the parentage of the commit even though previous
commits are not numbered for Bob's desk.
A `%this` merge means to keep what's in Bob's desk, but join the
ancestry. Thus, the new commit has the head of each desk as
parents, but the data is exactly what's in Bob's desk. For those
following along in git, this is the 'ours' merge strategy, not
the '--ours' option to the 'recursive' merge strategy. In other
words, even if Alice makes a change that does not conflict with
Bob, we throw it away. It's Bob's way or the highway.
A `%that` merge means to take what's in Alice's desk, but join
the ancestry. This is the reverse of `%this`.
A `%fine` merge is a "fast-forward" merge. This succeeds iff one
head is in the ancestry of the other. In this case, we use the
descendant as our new head.
For `%meet`, `%mate`, and `%meld` merges, we first find the most
recent common ancestor to use as our merge base. If we have no
common ancestors, then we fail. If we have more than one most
recent common ancestor, then we have a criss-cross situation,
which should be handled delicately. At present, we delicately
throw up our hands and give up, but something akin to git's
'recursive' strategy should be implemented in the future.
There's a functional inclusion ordering on `%fine`, `%meet`,
`%mate`, and `%meld` such that if an earlier strategy would have
succeeded, then every later strategy will produce the same
result. Put another way, every earlier strategy is the same as
every later strategy except with a restricted domain.
A `%meet` merge only succeeds if the changes from the merge base
to Alice's head (hereafter, "Alice's changes") are in different
files than Bob's changes. In this case, the parents are both
Alice's and Bob's heads, and the data is the merge base plus
Alice's changed files plus Bob's changed files.
A `%mate` merge attempts to merge changes to the same file when
both Alice and Bob change it. If the merge is clean, we use it;
otherwise, we fail. A merge between different types of changes --
for example, deleting a file vs changing it -- is always a
conflict. If we succeed, the parents are both Alice's and Bob's
heads, and the data is the merge base plus Alice's changed files
plus Bob's changed files plus the merged files.
A `%meld` merge will succeed even if there are conflicts. If
there are conflicts in a file, then we use the merge base's
version of that file, and we produce a set of files with
conflicts. The parents are both Alice's and Bob's heads, and the
data is the merge base plus Alice's changed files plus Bob's
changed files plus the successfully merged files plus the merge
base's version of the conflicting files.
That's the extent of the merge options in clay proper. In
userspace there's a final option `%auto`, which is the most
common. `%auto` checks to see if Bob's desk exists, and if it
doesn't we use a `%init` merge. Otherwise, we progressively try
`%fine`, `%meet`, and `%mate` until one succeeds.
If none succeed, we merge Bob's desk into a scratch desk. Then,
we merge Alice's desk into the scratch desk with the `%meld`
option to force the merge. For each file in the produced set of
conflicting files, we call the `++mash` function for the
appropriate mark, which annotates the conflicts if we know how.
Finally, we display a message to the user informing them of the
scratch desk's existence, which files have annotated conflicts,
and which files have unannotated conflicts. When the user has
resolved the conflicts, they can merge the scratch desk back into
Bob's desk. This will be a `%fine` merge since Bob's head is in
the ancestry of the scratch desk.
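As a rough, file-level sketch of this decision ladder (Python; desks
modeled as plain dicts, ancestry and mark-aware per-file merging
omitted, and none of these names taken from the real userspace code),
the conditions under which `%fine`, `%meet`, and the forced `%meld`
apply look roughly like this:

    # Toy desk merge: a desk is a dict of path -> contents, and "base" stands
    # in for the merge base found from the ancestry.  Only whole-file changes
    # are considered; this mirrors the decision logic, not the real code.

    def changed(base, desk):
        """Paths added, deleted, or modified relative to the merge base."""
        return {p for p in set(base) | set(desk) if base.get(p) != desk.get(p)}

    def apply_changes(out, desk, paths):
        for p in paths:
            if p in desk:
                out[p] = desk[p]                 # added or modified
            else:
                out.pop(p, None)                 # deleted
        return out

    def merge(base, alice, bob):
        a, b = changed(base, alice), changed(base, bob)
        if not a or not b:                       # %fine: fast-forward to the side
            return "%fine", dict(alice if a else bob)  # that actually changed
        if not (a & b):                          # %meet: changes in different files
            return "%meet", apply_changes(apply_changes(dict(base), alice, a), bob, b)
        # %mate would attempt a mark-aware per-file merge of the overlapping
        # files; this toy treats any overlap as a conflict and falls through
        # to the forced %meld behaviour.
        out = apply_changes(apply_changes(dict(base), alice, a - b), bob, b - a)
        return "%meld", out, a & b               # conflicting files stay at the base

    base  = {"readme": "hello", "app": "v1"}
    alice = {"readme": "hello!", "app": "v1"}
    bob   = {"readme": "hello", "app": "v2"}
    print(merge(base, alice, bob))   # ('%meet', {'readme': 'hello!', 'app': 'v2'})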
### Autosync
Tracking and staying in sync with another desk is another
fundamental operation. We call this "autosync". This doesn't mean
simply mirroring a desk, since that wouldn't allow local changes.
We simply want to apply changes as they are made upstream, as
long as there are no conflicts with local changes.
This is implemented by watching the other desk, and, when it has
changes, merging these changes into our desk with the usual merge
strategies.
Note that it's quite reasonable for two desks to be autosynced to
each other. This results in any change on one desk being mirrored
to the other and vice versa.
Additionally, it's fine to set up an autosync even if one desk,
the other desk, or both desks do not exist. The sync will be
activated when the upstream desk comes into existence and will
create the downstream desk if needed.
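A minimal sketch of that loop (Python, with an invented `Desk`
container and a `merge_auto` callback standing in for the `%auto`
merge described above; nothing here names the real vane interface):

    # Toy autosync: subscribe to the upstream desk and merge each new revision
    # into the downstream desk; a conflicting merge leaves the downstream desk
    # untouched until someone resolves it by hand.
    class Desk:
        def __init__(self, files=None):
            self.files = dict(files or {})
            self.watchers = []

        def commit(self, files):
            self.files = dict(files)
            for watcher in self.watchers:
                watcher(self.files)

    def autosync(upstream, downstream, merge_auto):
        """Apply every future upstream revision to downstream via merge_auto."""
        def on_revision(new_files):
            merged = merge_auto(new_files, downstream.files)
            if merged is not None:               # None models a conflicting merge
                downstream.files = merged
        upstream.watchers.append(on_revision)

    # Two desks autosynced to each other just install a watcher in each direction.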


@ -1,8 +1,8 @@
/+ tree
/: /%%/ /%
/^ [tip=marl sub=(map span marl) ~]
/. /; (getall %h1 ~) /elem/
/_ /; (getall %h1 ~) /elem/
/. /; (getall:tree %h1 ~) /elem/
/_ /; (getall:tree %h1 ~) /elem/
==
!:
::::

pub/talklog/hymn.hook Normal file

@ -0,0 +1,37 @@
:: Talk log front-end
::
:::: /hook/hymn/talklog/pub
::
/? 310
/= mez
/: /===/talk
/% 2
/; pojo
/; |=(a=(list ,[@ p=json]) =.(a (flop a) ?~(a [%a ~] p.i.a)))
/& /json/
|%
++ cdnj |=(a=tape ;script(src "//cdnjs.cloudflare.com/ajax/libs/{a}");)
--
::
::::
::
^- manx
;html
;head
;title: Talk Log
;* %- turn :_ cdnj ^- wall
:~ "jquery/2.1.1/jquery.min.js"
"lodash.js/2.4.1/lodash.min.js"
"react/0.13.0/react.js"
== ==
;body
;div#cont;
;script(type "text/javascript", src "/~/at/home/lib/urb.js");
;link/"/home/pub/talk/src/css/main.css"(type "text/css", rel "stylesheet");
;script: window.MessageData = {mez}
;script@"src/MessagesComponent.coffee"(type "text/coffeescript");
;script@"src/util.coffee"(type "text/coffeescript");
;script@"src/main.coffee"(type "text/coffeescript");
;+ (cdnj "coffee-script/1.7.1/coffee-script.min.js")
==
==


@ -0,0 +1,82 @@
recl = React.createClass
{div,pre,br,input,textarea,a} = React.DOM
Message = recl
lz: (n) -> if n<10 then "0#{n}" else "#{n}"
convTime: (time) ->
d = new Date time
h = @lz d.getHours()
m = @lz d.getMinutes()
s = @lz d.getSeconds()
"~#{h}.#{m}.#{s}"
render: ->
# pendingClass = if @props.pending isnt "received" then "pending" else ""
delivery = _.uniq _.pluck @props.thought.audience, "delivery"
klass = if delivery.indexOf("received") isnt -1 then " received" else " pending"
speech = @props.thought.statement.speech
attachments = []
while speech.fat?
attachments.push pre {}, speech.fat.tor.tank.txt.join("\n")
speech = speech.fat.taf # XX
if !speech? then return;
if speech.lin?.say is false then klass += " say"
if speech.url then klass += " url"
name = if @props.name then @props.name else ""
aude = _.keys @props.thought.audience
audi = window.util.clipAudi(aude).map (_audi) -> (div {}, _audi.slice(1))
type = ['private','public']
type = type[Number(aude.indexOf(window.util.mainStationPath(window.urb.ship)) is -1)]
mess = switch
when (con = speech.lin) or (con = speech.app) or
(con = speech.exp) or (con = speech.tax)
con.txt
when (con = speech.url)
(a {href:con.txt,target:"_blank"}, con.txt)
else "Unknown speech type:" + (" %"+x for x of speech).join ''
klass += switch
when speech.app? then " say"
when speech.exp? then " exp"
else ""
{ship} = @props
if ship[0] is "~" then ship = ship.slice(1)
div {className:"message#{klass}"}, [
(div {className:"attr"}, [
div {className:"type #{type}"}, ""
div {className:"iden"}, div {className:"ship"}, ship
div {className:"audi"}, audi
div {className:"time"}, @convTime @props.thought.statement.date
])
div {className:"mess"}, mess,
if attachments.length
div {className:"fat"}, attachments
]
window.MessagesComponent = recl
pageSize: 50
paddingTop: 100
getInitialState: -> {station:window.location.pathname.split("/").reverse()[0]}
sortedMessages: (messages) ->
_.sortBy messages, (_message) ->
_message.pending = false
_message.thought.statement.date
render: ->
_messages = @sortedMessages @props.messages
messages = _messages.map (_message,k) =>
if _message.thought.statement.speech?.app
_message.ship = "system"
_message.station = @state.station
React.createElement Message,_message
div {id: "messages"}, messages


@ -0,0 +1,3 @@
$ ->
rend = React.render; rele = React.createElement
rend (rele window.MessagesComponent, messages: window.MessageData), ($ '#cont')[0]


@ -0,0 +1,85 @@
if not window.util then window.util = {}
_.merge window.util,
mainStations: ["court","floor","porch"]
mainStationPath: (user) -> "~#{user}/#{window.util.mainStation(user)}"
mainStation: (user) ->
if not user then user = window.urb.ship
switch user.length
when 3
return "court"
when 6
return "floor"
when 13
return "porch"
clipAudi: (audi) ->
audi = audi.join " "
ms = window.util.mainStationPath window.urb.ship
regx = new RegExp "/#{ms}","g"
audi = audi.replace regx,""
audi.split " "
expandAudi: (audi) ->
audi = audi.join " "
ms = window.util.mainStationPath window.urb.ship
if audi.indexOf(ms) is -1
if audi.length > 0
audi += " "
audi += "#{ms}"
audi.split " "
create: (name) ->
window.talk.StationPersistence.createStation name, (err,res) ->
subscribe: (name) ->
window.talk.StationPersistence.addSource "main",window.urb.ship,["~zod/#{name}"]
uuid32: ->
str = "0v"
str += Math.ceil(Math.random()*8)+"."
for i in [0..5]
_str = Math.ceil(Math.random()*10000000).toString(32)
_str = ("00000"+_str).substr(-5,5)
str += _str+"."
str.slice(0,-1)
populate: (station,number) ->
c = 0
send = ->
if c < number
c++
else
console.log 'done'
return true
_audi = {}
_audi[station] = "pending"
_message =
serial:window.util.uuid32()
audience:_audi
statement:
speech:
say:"Message "+c
time: Date.now()
now: Date.now()
window.talk.MessagePersistence.sendMessage _message,send
send()
getScroll: ->
@writingPosition = $('#c').outerHeight(true)+$('#c').offset().top-$(window).height()
setScroll: ->
window.util.getScroll()
$(window).scrollTop($("#c").height())
isScrolling: ->
if not window.util.writingPosition
window.util.getScroll()
return ($(window).scrollTop()+$('#writing').outerHeight() < window.util.writingPosition)
checkScroll: ->
if window.util.isScrolling()
$('body').addClass 'scrolling'
else
$('body').removeClass 'scrolling'


@ -47,7 +47,7 @@ Links = React.createFactory query {
render: -> div {className:'links'}, @props.children, @_render()
_render: -> div {id:"sibs"}, div {className:"active"}, a {}, @props.curr
CLICK = 'a,h1,h2,h3,h4,h5,h6'
CLICK = 'a' # 'a,h1,h2,h3,h4,h5,h6'
module.exports = query {sein:'t',path:'t',name:'t',next:'t',prev:'t'},recl
displayName: "Anchor"
getInitialState: -> url: window.location.pathname


@ -34,10 +34,10 @@ module.exports = (queries, Child, load=_load)-> recl
filterWith: (have,_queries)->
return _queries unless have?
request = {}
for k of _queries
for k of _queries when k isnt 'kids'
request[k] = _queries[k] unless have[k] isnt undefined
if _queries.kids? and have.kids?
if _.isEmpty have.kids
if _queries.kids?
if not have.kids?
request.kids = _queries.kids
else
request.kids = {}


@ -7,4 +7,5 @@ module.exports =
list: require './ListComponent.coffee'
kids: require './KidsComponent.coffee'
toc: require './TocComponent.coffee'
email: require './EmailComponent.coffee'
lost: recl render: -> (div {}, "<lost(", @props.children, ")>")


@ -0,0 +1,45 @@
reactify = require './Reactify.coffee'
recl = React.createClass
{div,p,button,input} = React.DOM
module.exports = recl
displayName: "email"
getInitialState: -> {submit:false,email:""}
onClick: -> @submit()
onKeyUp: (e) ->
email = @$email.val()
valid = (email.indexOf('@') != -1 &&
email.indexOf('.') != -1 &&
email.length > 7 &&
email.split(".")[1].length > 1 &&
email.split("@")[0].length > 0 &&
email.split("@")[1].length > 4)
@$email.toggleClass 'valid',valid
@$email.removeClass 'error'
if e.keyCode is 13
if valid is true
@submit()
e.stopPropagation()
e.preventDefault()
return false
else
@$email.addClass 'error'
submit: ->
$.post @props.dataPath,{email:@$email.val()},() =>
@setState {submit:true}
componentDidMount: -> @$email = $('input.email')
render: ->
if @state.submit is false
cont = [
(input {key:"field",className:"email",placeholder:"your@email.com",@onKeyUp}, @state.email)
(button {key:"submit",className:"submit",@onClick}, "Submit")
]
else
cont = [(div {className:"submitted"},"Got it. Thanks!")]
(p {className:"email"}, cont)


@ -9,29 +9,26 @@ module.exports = query {body:'r'}, recl
hash:null
displayName: "TableOfContents"
_click: (e) ->
document.location.hash = @urlsafe $(e.target).text()
urlsafe: (str) ->
str.toLowerCase().replace(/\ /g, "-").replace(/[^a-z0-9~_.-]/g,"")
_click: (id)->
-> if id then document.location.hash = id
componentDidMount: ->
@int = setInterval @checkHash,100
@st = $(window).scrollTop()
$(window).on 'scroll',@checkScroll
@$headers = $('#toc h1, #toc h2, #toc h3, #toc h4')
# $(window).on 'scroll',@checkScroll
@$headers = $('#toc').children('h1,h2,h3,h4').filter('[id]')
checkScroll: ->
st = $(window).scrollTop()
if Math.abs(@st-st) > 10
hash = null
@st = st
for k,v of @$headers
for v in @$headers
continue if v.tagName is undefined
$h = $ v
hst = $h.offset().top-$h.outerHeight(true)+10
if hst < st
hash = @urlsafe $h.text()
hash = $h.attr('id')
if hst > st and hash isnt @hash and hash isnt null
@hash = "#"+hash
document.location.hash = hash
@ -40,9 +37,9 @@ module.exports = query {body:'r'}, recl
checkHash: ->
if document.location.hash?.length > 0 and document.location.hash isnt @hash
hash = document.location.hash.slice(1)
for k,v of @$headers
for v in @$headers
$h = $ v
if hash is @urlsafe $h.text()
if hash is $h.attr('id')
@hash = document.location.hash
offset = $h.offset().top - $h.outerHeight(true)
setTimeout -> $(window).scrollTop offset
@ -52,20 +49,23 @@ module.exports = query {body:'r'}, recl
componentWillUnmount: ->
clearInterval @int
collectHeaders: (e) ->
hs = [{gn:"h1", ga:{className:"t"}, c:["Table of contents"]}]
for k,v of e
if not v.gn then continue
if v.gn[0] is 'h' and parseInt(v.gn[1]) isnt NaN
hs.push v
return hs
collectHeader: ({gn,ga,c})->
if gn and gn[0] is 'h' and parseInt(gn[1]) isnt NaN
ga = _.clone ga
ga.onClick = @_click ga.id
delete ga.id
{gn,ga,c}
parseHeaders: ->
if @props.body.c
for k,v of @props.body.c
for v in @props.body.c
if v.gn is 'div' and v.ga?.id is "toc"
return {gn:"div", ga:{className:"toc",onClick:@_click}, c:@collectHeaders(v.c)}
return {
gn:"div"
ga:{className:"toc"}
c:[
{gn:"h1", ga:{className:"t"}, c:["Table of contents"]}
(_.filter v.c.map @collectHeader)...
]}
render: -> reactify @parseHeaders()


@ -35,7 +35,8 @@ module.exports = {
};
},{"../dispatcher/Dispatcher.coffee":13,"../persistence/TreePersistence.coffee":19}],2:[function(require,module,exports){
},{"../dispatcher/Dispatcher.coffee":14,"../persistence/TreePersistence.coffee":20}],2:[function(require,module,exports){
var BodyComponent, CLICK, Links, TreeActions, TreeStore, a, clas, div, query, reactify, recl, ref;
clas = require('classnames');
@ -141,7 +142,7 @@ Links = React.createFactory(query({
}
})));
CLICK = 'a,h1,h2,h3,h4,h5,h6';
CLICK = 'a';
module.exports = query({
sein: 't',
@ -288,7 +289,8 @@ module.exports = query({
}));
},{"../actions/TreeActions.coffee":1,"../stores/TreeStore.coffee":20,"./Async.coffee":3,"./BodyComponent.coffee":4,"./Reactify.coffee":10,"classnames":15}],3:[function(require,module,exports){
},{"../actions/TreeActions.coffee":1,"../stores/TreeStore.coffee":21,"./Async.coffee":3,"./BodyComponent.coffee":4,"./Reactify.coffee":11,"classnames":16}],3:[function(require,module,exports){
var TreeActions, TreeStore, _load, code, div, recl, ref, span;
_load = require('./LoadComponent.coffee');
@ -348,12 +350,14 @@ module.exports = function(queries, Child, load) {
}
request = {};
for (k in _queries) {
if (have[k] === void 0) {
request[k] = _queries[k];
if (k !== 'kids') {
if (have[k] === void 0) {
request[k] = _queries[k];
}
}
}
if ((_queries.kids != null) && (have.kids != null)) {
if (_.isEmpty(have.kids)) {
if (_queries.kids != null) {
if (have.kids == null) {
request.kids = _queries.kids;
} else {
request.kids = {};
@ -389,7 +393,8 @@ module.exports = function(queries, Child, load) {
};
},{"../actions/TreeActions.coffee":1,"../stores/TreeStore.coffee":20,"./LoadComponent.coffee":9}],4:[function(require,module,exports){
},{"../actions/TreeActions.coffee":1,"../stores/TreeStore.coffee":21,"./LoadComponent.coffee":10}],4:[function(require,module,exports){
var div, query, reactify, recl;
query = require('./Async.coffee');
@ -414,7 +419,8 @@ module.exports = query({
}));
},{"./Async.coffee":3,"./Reactify.coffee":10}],5:[function(require,module,exports){
},{"./Async.coffee":3,"./Reactify.coffee":11}],5:[function(require,module,exports){
var div, recl, ref, textarea;
recl = React.createClass;
@ -437,6 +443,7 @@ module.exports = recl({
});
},{}],6:[function(require,module,exports){
var div, recl;
@ -450,6 +457,7 @@ module.exports = {
list: require('./ListComponent.coffee'),
kids: require('./KidsComponent.coffee'),
toc: require('./TocComponent.coffee'),
email: require('./EmailComponent.coffee'),
lost: recl({
render: function() {
return div({}, "<lost(", this.props.children, ")>");
@ -458,7 +466,89 @@ module.exports = {
};
},{"./CodeMirror.coffee":5,"./KidsComponent.coffee":7,"./ListComponent.coffee":8,"./SearchComponent.coffee":11,"./TocComponent.coffee":12}],7:[function(require,module,exports){
},{"./CodeMirror.coffee":5,"./EmailComponent.coffee":7,"./KidsComponent.coffee":8,"./ListComponent.coffee":9,"./SearchComponent.coffee":12,"./TocComponent.coffee":13}],7:[function(require,module,exports){
var button, div, input, p, reactify, recl, ref;
reactify = require('./Reactify.coffee');
recl = React.createClass;
ref = React.DOM, div = ref.div, p = ref.p, button = ref.button, input = ref.input;
module.exports = recl({
displayName: "email",
getInitialState: function() {
return {
submit: false,
email: ""
};
},
onClick: function() {
return this.submit();
},
onKeyUp: function(e) {
var email, valid;
email = this.$email.val();
valid = email.indexOf('@') !== -1 && email.indexOf('.') !== -1 && email.length > 7 && email.split(".")[1].length > 1 && email.split("@")[0].length > 0 && email.split("@")[1].length > 4;
this.$email.toggleClass('valid', valid);
this.$email.removeClass('error');
if (e.keyCode === 13) {
if (valid === true) {
this.submit();
e.stopPropagation();
e.preventDefault();
return false;
} else {
return this.$email.addClass('error');
}
}
},
submit: function() {
return $.post(this.props.dataPath, {
email: this.$email.val()
}, (function(_this) {
return function() {
return _this.setState({
submit: true
});
};
})(this));
},
componentDidMount: function() {
return this.$email = $('input.email');
},
render: function() {
var cont;
if (this.state.submit === false) {
cont = [
input({
key: "field",
className: "email",
placeholder: "your@email.com",
onKeyUp: this.onKeyUp
}, this.state.email), button({
key: "submit",
className: "submit",
onClick: this.onClick
}, "Submit")
];
} else {
cont = [
div({
className: "submitted"
}, "Got it. Thanks!")
];
}
return p({
className: "email"
}, cont);
}
});
},{"./Reactify.coffee":11}],8:[function(require,module,exports){
var a, div, hr, li, query, reactify, recl, ref, ul;
reactify = require('./Reactify.coffee');
@ -497,7 +587,8 @@ module.exports = query({
}));
},{"./Async.coffee":3,"./Reactify.coffee":10}],8:[function(require,module,exports){
},{"./Async.coffee":3,"./Reactify.coffee":11}],9:[function(require,module,exports){
var a, clas, div, h1, li, query, reactify, recl, ref, ul;
clas = require('classnames');
@ -592,7 +683,8 @@ module.exports = query({
}));
},{"./Async.coffee":3,"./Reactify.coffee":10,"classnames":15}],9:[function(require,module,exports){
},{"./Async.coffee":3,"./Reactify.coffee":11,"classnames":16}],10:[function(require,module,exports){
var div, input, recl, ref, textarea;
recl = React.createClass;
@ -632,7 +724,8 @@ module.exports = recl({
});
},{}],10:[function(require,module,exports){
},{}],11:[function(require,module,exports){
var Virtual, div, load, reactify, recl, ref, rele, span, walk;
recl = React.createClass;
@ -699,7 +792,8 @@ module.exports = _.extend(reactify, {
});
},{"./LoadComponent.coffee":9}],11:[function(require,module,exports){
},{"./LoadComponent.coffee":10}],12:[function(require,module,exports){
var a, div, input, query, reactify, recl, ref,
slice = [].slice;
@ -837,8 +931,10 @@ module.exports = query({
}));
},{"./Async.coffee":3,"./Reactify.coffee":10}],12:[function(require,module,exports){
var div, query, reactify, recl;
},{"./Async.coffee":3,"./Reactify.coffee":11}],13:[function(require,module,exports){
var div, query, reactify, recl,
slice = [].slice;
query = require('./Async.coffee');
@ -853,35 +949,35 @@ module.exports = query({
}, recl({
hash: null,
displayName: "TableOfContents",
_click: function(e) {
return document.location.hash = this.urlsafe($(e.target).text());
},
urlsafe: function(str) {
return str.toLowerCase().replace(/\ /g, "-").replace(/[^a-z0-9~_.-]/g, "");
_click: function(id) {
return function() {
if (id) {
return document.location.hash = id;
}
};
},
componentDidMount: function() {
this.int = setInterval(this.checkHash, 100);
this.st = $(window).scrollTop();
$(window).on('scroll', this.checkScroll);
return this.$headers = $('#toc h1, #toc h2, #toc h3, #toc h4');
return this.$headers = $('#toc').children('h1,h2,h3,h4').filter('[id]');
},
checkScroll: function() {
var $h, hash, hst, k, ref, results, st, v;
var $h, hash, hst, i, len, ref, results, st, v;
st = $(window).scrollTop();
if (Math.abs(this.st - st) > 10) {
hash = null;
this.st = st;
ref = this.$headers;
results = [];
for (k in ref) {
v = ref[k];
for (i = 0, len = ref.length; i < len; i++) {
v = ref[i];
if (v.tagName === void 0) {
continue;
}
$h = $(v);
hst = $h.offset().top - $h.outerHeight(true) + 10;
if (hst < st) {
hash = this.urlsafe($h.text());
hash = $h.attr('id');
}
if (hst > st && hash !== this.hash && hash !== null) {
this.hash = "#" + hash;
@ -895,15 +991,15 @@ module.exports = query({
}
},
checkHash: function() {
var $h, hash, k, offset, ref, ref1, results, v;
var $h, hash, i, len, offset, ref, ref1, results, v;
if (((ref = document.location.hash) != null ? ref.length : void 0) > 0 && document.location.hash !== this.hash) {
hash = document.location.hash.slice(1);
ref1 = this.$headers;
results = [];
for (k in ref1) {
v = ref1[k];
for (i = 0, len = ref1.length; i < len; i++) {
v = ref1[i];
$h = $(v);
if (hash === this.urlsafe($h.text())) {
if (hash === $h.attr('id')) {
this.hash = document.location.hash;
offset = $h.offset().top - $h.outerHeight(true);
setTimeout(function() {
@ -920,42 +1016,39 @@ module.exports = query({
componentWillUnmount: function() {
return clearInterval(this.int);
},
collectHeaders: function(e) {
var hs, k, v;
hs = [
{
gn: "h1",
ga: {
className: "t"
},
c: ["Table of contents"]
}
];
for (k in e) {
v = e[k];
if (!v.gn) {
continue;
}
if (v.gn[0] === 'h' && parseInt(v.gn[1]) !== NaN) {
hs.push(v);
}
collectHeader: function(arg) {
var c, ga, gn;
gn = arg.gn, ga = arg.ga, c = arg.c;
if (gn && gn[0] === 'h' && parseInt(gn[1]) !== NaN) {
ga = _.clone(ga);
ga.onClick = this._click(ga.id);
delete ga.id;
return {
gn: gn,
ga: ga,
c: c
};
}
return hs;
},
parseHeaders: function() {
var k, ref, ref1, v;
var i, len, ref, ref1, v;
if (this.props.body.c) {
ref = this.props.body.c;
for (k in ref) {
v = ref[k];
for (i = 0, len = ref.length; i < len; i++) {
v = ref[i];
if (v.gn === 'div' && ((ref1 = v.ga) != null ? ref1.id : void 0) === "toc") {
return {
gn: "div",
ga: {
className: "toc",
onClick: this._click
className: "toc"
},
c: this.collectHeaders(v.c)
c: [{
gn: "h1",
ga: {
className: "t"
},
c: ["Table of contents"]
}].concat(slice.call(_.filter(v.c.map(this.collectHeader))))
};
}
}
@ -967,7 +1060,8 @@ module.exports = query({
}));
},{"./Async.coffee":3,"./Reactify.coffee":10}],13:[function(require,module,exports){
},{"./Async.coffee":3,"./Reactify.coffee":11}],14:[function(require,module,exports){
var Dispatcher;
Dispatcher = require('flux').Dispatcher;
@ -988,7 +1082,8 @@ module.exports = _.extend(new Dispatcher(), {
});
},{"flux":16}],14:[function(require,module,exports){
},{"flux":17}],15:[function(require,module,exports){
var rend;
rend = React.render;
@ -1134,7 +1229,8 @@ $(function() {
});
},{"./actions/TreeActions.coffee":1,"./components/AnchorComponent.coffee":2,"./components/BodyComponent.coffee":4,"./components/Components.coffee":6,"./persistence/TreePersistence.coffee":19}],15:[function(require,module,exports){
},{"./actions/TreeActions.coffee":1,"./components/AnchorComponent.coffee":2,"./components/BodyComponent.coffee":4,"./components/Components.coffee":6,"./persistence/TreePersistence.coffee":20}],16:[function(require,module,exports){
/*!
Copyright (c) 2015 Jed Watson.
Licensed under the MIT License (MIT), see
@ -1185,7 +1281,7 @@ $(function() {
}());
},{}],16:[function(require,module,exports){
},{}],17:[function(require,module,exports){
/**
* Copyright (c) 2014-2015, Facebook, Inc.
* All rights reserved.
@ -1197,7 +1293,7 @@ $(function() {
module.exports.Dispatcher = require('./lib/Dispatcher')
},{"./lib/Dispatcher":17}],17:[function(require,module,exports){
},{"./lib/Dispatcher":18}],18:[function(require,module,exports){
/*
* Copyright (c) 2014, Facebook, Inc.
* All rights reserved.
@ -1449,7 +1545,7 @@ var _prefix = 'ID_';
module.exports = Dispatcher;
},{"./invariant":18}],18:[function(require,module,exports){
},{"./invariant":19}],19:[function(require,module,exports){
/**
* Copyright (c) 2014, Facebook, Inc.
* All rights reserved.
@ -1504,7 +1600,11 @@ var invariant = function(condition, format, a, b, c, d, e, f) {
module.exports = invariant;
},{}],19:[function(require,module,exports){
},{}],20:[function(require,module,exports){
var dedup;
dedup = {};
module.exports = {
get: function(path, query, cb) {
var url;
@ -1512,6 +1612,10 @@ module.exports = {
query = "no-query";
}
url = (window.tree.basepath(path)) + ".json?q=" + (this.encode(query));
if (dedup[url]) {
return;
}
dedup[url] = true;
return $.get(url, {}, function(data) {
if (cb) {
return cb(null, data);
@ -1521,7 +1625,7 @@ module.exports = {
encode: function(obj) {
var _encode, delim;
delim = function(n) {
return ('_'.repeat(n)) || '.';
return Array(n + 1).join('_') || '.';
};
_encode = function(obj) {
var _dep, dep, k, res, sub, v;
@ -1554,7 +1658,8 @@ module.exports = {
};
},{}],20:[function(require,module,exports){
},{}],21:[function(require,module,exports){
var EventEmitter, MessageDispatcher, QUERIES, TreeStore, _curr, _data, _tree, clog;
EventEmitter = require('events').EventEmitter;
@ -1594,23 +1699,32 @@ TreeStore = _.extend(EventEmitter.prototype, {
return this.fulfillAt(this.getTree(path.split('/')), path, query);
},
fulfillAt: function(tree, path, query) {
var data, k, ref, sub, t;
var data, have, k, sub, t;
data = this.fulfillLocal(path, query);
for (k in query) {
t = query[k];
if (!QUERIES[k]) {
continue;
have = _data[path];
if (have != null) {
for (k in query) {
t = query[k];
if (!QUERIES[k]) {
continue;
}
if (t !== QUERIES[k]) {
throw TypeError("Wrong query type: " + k + ", '" + t + "'");
}
data[k] = have[k];
}
if (t !== QUERIES[k]) {
throw TypeError("Wrong query type: " + k + ", '" + t + "'");
}
data[k] = (ref = _data[path]) != null ? ref[k] : void 0;
}
if (query.kids) {
data.kids = {};
for (k in tree) {
sub = tree[k];
data.kids[k] = this.fulfillAt(sub, path + "/" + k, query.kids);
if (query.kids) {
if (have.EMPTY) {
data.kids = {};
} else {
for (k in tree) {
sub = tree[k];
if (data.kids == null) {
data.kids = {};
}
data.kids[k] = this.fulfillAt(sub, path + "/" + k, query.kids);
}
}
}
}
if (!_.isEmpty(data)) {
@ -1663,6 +1777,7 @@ TreeStore = _.extend(EventEmitter.prototype, {
this.loadValues(tree[k], path + "/" + k, v);
}
if (data.kids && _.isEmpty(data.kids)) {
old.EMPTY = true;
old.body = {
gn: 'div',
c: [
@ -1789,7 +1904,8 @@ TreeStore.dispatchToken = MessageDispatcher.register(function(payload) {
module.exports = TreeStore;
},{"../dispatcher/Dispatcher.coffee":13,"events":21}],21:[function(require,module,exports){
},{"../dispatcher/Dispatcher.coffee":14,"events":22}],22:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
@ -2092,4 +2208,4 @@ function isUndefined(arg) {
return arg === void 0;
}
},{}]},{},[14]);
},{}]},{},[15]);


@ -1,9 +1,12 @@
dedup = {} # XX wrong layer
module.exports =
get: (path,query="no-query",cb) ->
url = "#{window.tree.basepath(path)}.json?q=#{@encode query}"
return if dedup[url]
dedup[url] = true
$.get url, {}, (data) -> if cb then cb null,data
encode: (obj)->
delim = (n)-> ('_'.repeat n) || '.'
delim = (n)-> Array(n+1).join('_') || '.'
_encode = (obj)->
if typeof obj isnt 'object'
return [0,obj]


@ -21,13 +21,17 @@ TreeStore = _.extend EventEmitter.prototype, {
fulfill: (path,query) -> @fulfillAt (@getTree path.split '/'),path,query
fulfillAt: (tree,path,query)->
data = @fulfillLocal path, query
for k,t of query when QUERIES[k]
if t isnt QUERIES[k] then throw TypeError "Wrong query type: #{k}, '#{t}'"
data[k] = _data[path]?[k]
if query.kids
data.kids = {}
for k,sub of tree
data.kids[k] = @fulfillAt sub, path+"/"+k, query.kids
have = _data[path]
if have?
for k,t of query when QUERIES[k]
if t isnt QUERIES[k] then throw TypeError "Wrong query type: #{k}, '#{t}'"
data[k] = have[k]
if query.kids
if have.EMPTY
data.kids = {}
else for k,sub of tree
data.kids ?= {}
data.kids[k] = @fulfillAt sub, path+"/"+k, query.kids
data unless _.isEmpty data
fulfillLocal: (path, query)->
@ -54,6 +58,7 @@ TreeStore = _.extend EventEmitter.prototype, {
@loadValues tree[k], path+"/"+k, v
if data.kids && _.isEmpty data.kids
old.EMPTY = true
old.body =
gn: 'div'
c: [ {gn:'h1', ga:{className:'error'}, c:['Error: Empty path']}


@ -13,44 +13,6 @@
haw=@uvH :: source hash
ted=sole-edit :: state change
== ::
++ sole-dialog :: standard dialog
|* out=$+(* *) :: output structure
$+(sole-input (sole-result out)) :: output function
:: ::
++ sole-result :: conditional result
|* out=$+(* *) :: output structure
$|(@ud (sole-product out)) :: error position
:: ::
++ sole-product :: success result
|* out=$+(* *) ::
%+ pair (list tank) ::
%+ each (unit out) :: ~ is abort
(pair sole-prompt (sole-dialog out)) :: ask and continue
:: ::
++ sole-so :: construct result
|* pro=* ::
[p=*(list tank) q=[%& p=[~ u=pro]]] ::
:: ::
++ sole-yo :: add output tank
|* [tan=tank res=(sole-result)] ::
?@ res res ::
[p=[i=tan t=p.res] q=q.res] ::
:: ::
++ sole-lo :: construct prompt
|* [pom=sole-prompt mor=(sole-dialog)] ::
[p=*(list tank) q=[%| p=pom q=mor]] ::
:: ::
++ sole-no :: empty result
[p=*(list tank) q=~] ::
:: ::
++ sole-go :: parse by rule
|* [sef=_rule fun=$+(* *)] ::
|= txt=sole-input ::
=+ vex=(sef [0 0] txt) ::
?: |(!=((lent txt) q.p.vex) ?=(~ q.vex)) ::
q.p.vex ::
(fun p.u.q.vex) ::
:: ::
++ sole-clock ,[own=@ud his=@ud] :: vector clock
++ sole-edit :: shared state change
$% [%del p=@ud] :: delete one at
@ -83,10 +45,69 @@
tag=term :: history mode
cad=tape :: caption
== ::
++ sole-input tape :: prompt input
++ sole-share :: symmetric state
$: ven=sole-clock :: our vector clock
leg=(list sole-edit) :: unmerged edits
buf=sole-buffer :: sole state
== ::
:: ::
:: ::
++ sole-dialog :: standard dialog
|* out=$+(* *) :: output structure
$+(sole-input (sole-result out)) :: output function
:: ::
++ sole-input tape :: prompt input
++ sole-result :: conditional result
|* out=$+(* *) :: output structure
$|(@ud (sole-product out)) :: error position
:: ::
++ sole-product :: success result
|* out=$+(* *) ::
%+ pair (list tank) ::
%+ each (unit out) :: ~ is abort
(pair sole-prompt (sole-dialog out)) :: ask and continue
:: ::
++ sole-request :: scraper result
|* out=$+(* *) :: output structure
%+ pair (list tank) ::
%+ each (unit out) :: ~ is abort
(pair hiss $+(httr (sole-request out))) :: fetch and continue
:: ::
++ sole-gen :: XX virtual type
$% [%say $+((sole-args) (cask))] :: direct noun
[%ask $+((sole-args) (sole-product (cask)))] :: dialog
[%get $+((sole-args) (sole-request (cask)))] :: scraper
== ::
++ sole-args :: generator arguments
|* _[lit=,* opt=,*] ::
,[[now=@da eny=@uvI bek=beak] [lit opt]] ::
:: ::
:: ::
++ sole-so :: construct result
|* pro=* ::
[p=*(list tank) q=[%& p=[~ u=pro]]] ::
:: ::
++ sole-yo :: add output tank
|* [tan=tank res=(sole-result)] ::
?@ res res ::
[p=[i=tan t=p.res] q=q.res] ::
:: ::
++ sole-lo :: construct prompt
|* [pom=sole-prompt mor=(sole-dialog)] ::
[p=*(list tank) q=[%| p=pom q=mor]] ::
:: ::
++ sole-at :: fetch url
|* [pul=_purl fun=$+(httr *)] ::
[p=*(list tank) q=[%| p=[pul %get ~ ~] q=fun]] ::
:: ::
++ sole-no :: empty result
[p=*(list tank) q=~] ::
:: ::
++ sole-go :: parse by rule
|* [sef=_rule fun=$+(* *)] ::
|= txt=sole-input ::
=+ vex=(sef [0 0] txt) ::
?: |(!=((lent txt) q.p.vex) ?=(~ q.vex)) ::
q.p.vex ::
(fun p.u.q.vex) ::
--


@ -93,7 +93,7 @@
[%set-title til=@t] :: set title
[%set-description des=@t] :: XX (list ,@t) :: set description
[%set-done don=?] :: set done
[%add-comment com=@t] :: XX (list ,@t) :: add comment
[%add-comment who=@p com=@t] :: XX (list ,@t) :: add comment
== ::
++ task ::
$: id=@uvH ::