Merge branch 'master' into mp/chat/firefox-take-2

Matilde Park 2020-04-13 21:38:27 -04:00
commit a4d37c0313
22 changed files with 2307 additions and 341 deletions


@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9701b98374a28ae99961871d4204856720e9ee6c3f769c389e6faf38392b95e7
size 12715787
oid sha256:8ebc16b420b2ce8e8bf7e6b0c611a102355f9ec013475da3cf66cc078f35e4b3
size 12458781


@ -399,56 +399,31 @@
^- (list card)
?> ?=(^ pax)
=/ last (dec (lent pax))
=/ backlog-start=(unit @ud)
%+ rush
(snag last `(list @ta)`pax)
dem:ag
=/ backlog-latest=(unit @ud) (rush (snag last `(list @ta)`pax) dem:ag)
=/ pas `path`(oust [last 1] `(list @ta)`pax)
?> ?=([* ^] pas)
?> (~(has by synced) pas)
:: check if read is permitted
?> (is-permitted src.bol pas)
=/ envs envelopes:(need (chat-scry pas))
=/ length (lent envs)
=/ latest
?~ backlog-latest length
?: (gth u.backlog-latest length) length
(sub length u.backlog-latest)
=. envs (scag latest envs)
=/ =vase !>([%messages pas 0 latest envs])
%- zing
:~ [%give %fact ~ %chat-update !>([%create pas])]~
?. ?&(?=(^ backlog-start) (~(has by allow-history) pas)) ~
(paginate-messages pas (need (chat-scry pas)) u.backlog-start)
?. ?&(?=(^ backlog-latest) (~(has by allow-history) pas)) ~
[%give %fact ~ %chat-update vase]~
[%give %kick [%backlog pax]~ `src.bol]~
==
::
++ paginate-messages
|= [=path =mailbox start=@ud]
^- (list card)
=/ cards=(list card) ~
=/ end (lent envelopes.mailbox)
?: |((gte start end) =(end 0))
cards
=. envelopes.mailbox (slag start `(list envelope)`envelopes.mailbox)
|- ^- (list card)
?~ envelopes.mailbox
cards
?: (lte end 5.000)
=. cards
%+ snoc cards
%- messages-fact
[path start (lent envelopes.mailbox) envelopes.mailbox]
$(envelopes.mailbox ~)
=. cards
%+ snoc cards
%- messages-fact
:^ path start
(add start 5.000)
(scag 5.000 `(list envelope)`envelopes.mailbox)
=: start (add start 5.000)
end (sub end 5.000)
==
$(envelopes.mailbox (slag 5.000 `(list envelope)`envelopes.mailbox))
::
++ fact-invite-update
|= [wir=wire fact=invite-update]
^- (quip card _state)
:_ state
?+ -.fact ~
::
%accepted
=/ ask-history ?~((chat-scry path.invite.fact) %.y %.n)
=* shp ship.invite.fact
@ -644,11 +619,6 @@
^- card
[%pass / %agent [our.bol %invite-store] %poke %invite-action !>(act)]
::
++ messages-fact
|= [=path start=@ud end=@ud envelopes=(list envelope)]
^- card
[%give %fact ~ %chat-update !>([%messages path start end envelopes])]
::
++ sec-to-perm
|= [pax=path =kind]
^- permission-action


@ -7,10 +7,12 @@
+$ versioned-state
$% state-zero
state-one
state-two
==
::
+$ state-zero [%0 =inbox]
+$ state-one [%1 =inbox]
+$ state-two [%2 =inbox]
::
+$ diff
$% [%chat-initial inbox]
@ -19,7 +21,7 @@
==
--
::
=| state-one
=| state-two
=* state -
::
%- agent:dbug
@ -38,10 +40,14 @@
++ on-load
|= old-vase=vase
=/ old !<(versioned-state old-vase)
?: ?=(%1 -.old)
?: ?=(%2 -.old)
[~ this(state old)]
:_ this(state [%1 inbox.old])
[%pass /lo-chst %agent [our.bowl %chat-hook] %poke %noun !>(%store-load)]~
=/ reversed-inbox=^inbox
%- ~(run by inbox.old)
|= =mailbox
^- ^mailbox
[config.mailbox (flop envelopes.mailbox)]
[~ this(state [%2 reversed-inbox])]
::
++ on-poke
~/ %chat-store-poke
@ -196,7 +202,7 @@
?~ mailbox
[~ state]
=. letter.envelope.act (evaluate-letter [author letter]:envelope.act)
=^ envelope u.mailbox (append-envelope u.mailbox envelope.act)
=^ envelope u.mailbox (prepend-envelope u.mailbox envelope.act)
:- (send-diff path.act act(envelope envelope))
state(inbox (~(put by inbox) path.act u.mailbox))
::
@ -207,20 +213,16 @@
=/ mailbox=(unit mailbox) (~(get by inbox) path.act)
?~ mailbox
[~ state]
=. envelopes.act (flop envelopes.act)
=/ evaluated-envelopes=(list envelope) ~
|- ^- (quip card _state)
?~ envelopes.act
:_ state(inbox (~(put by inbox) path.act u.mailbox))
%+ send-diff path.act
:* %messages
path.act
(sub length.config.u.mailbox (lent evaluated-envelopes))
length.config.u.mailbox
evaluated-envelopes
==
[%messages path.act 0 (lent evaluated-envelopes) evaluated-envelopes]
=. letter.i.envelopes.act (evaluate-letter [author letter]:i.envelopes.act)
=^ envelope u.mailbox (append-envelope u.mailbox i.envelopes.act)
=. evaluated-envelopes (snoc evaluated-envelopes envelope)
=^ envelope u.mailbox (prepend-envelope u.mailbox i.envelopes.act)
=. evaluated-envelopes [envelope evaluated-envelopes]
$(envelopes.act t.envelopes.act)
::
++ handle-read
@ -246,12 +248,12 @@
letter(output (eval bol hoon))
letter
::
++ append-envelope
++ prepend-envelope
|= [=mailbox =envelope]
^+ [envelope mailbox]
=. number.envelope +(length.config.mailbox)
=: length.config.mailbox +(length.config.mailbox)
envelopes.mailbox (snoc envelopes.mailbox envelope)
envelopes.mailbox [envelope envelopes.mailbox]
==
[envelope mailbox]
::


@ -121,15 +121,7 @@
%- ~(run by inbox)
|= =mailbox
^- ^mailbox
[config.mailbox (truncate-envelopes envelopes.mailbox)]
::
++ truncate-envelopes
|= envelopes=(list envelope)
^- (list envelope)
=/ length (lent envelopes)
?: (lth length message-limit)
envelopes
(slag (sub length message-limit) envelopes)
[config.mailbox (scag message-limit envelopes.mailbox)]
--
::
++ on-agent

File diff suppressed because it is too large.


@ -60,11 +60,11 @@
(notebook-short-json book)
::
++ notebooks-map-json
|= [our=@p books=(map @tas notebook) subs=(map [@p @tas] notebook)]
|= [our=@p books=(map [@p @tas] notebook)]
^- json
=, enjs:format
=/ subs-notebooks-map=json
%- ~(rep by subs)
=/ notebooks-map=json
%- ~(rep by books)
|= [[[host=@p book-name=@tas] book=notebook] out=json]
^- json
=/ host-ta (scot %p host)
@ -79,22 +79,9 @@
=. p.u.books (~(put by p.u.books) book-name (notebook-short-json book))
:- %o
(~(put by p.out) host-ta u.books)
=? subs-notebooks-map ?=(~ subs-notebooks-map)
=? notebooks-map ?=(~ notebooks-map)
[%o ~]
=/ our-notebooks-map=json
%- ~(rep by books)
|= [[book-name=@tas book=notebook] out=json]
^- json
?~ out
(frond book-name (notebook-short-json book))
?> ?=(%o -.out)
:- %o
(~(put by p.out) book-name (notebook-short-json book))
?~ our-notebooks-map
subs-notebooks-map
?> ?=(%o -.subs-notebooks-map)
:- %o
(~(put by p.subs-notebooks-map) (scot %p our) our-notebooks-map)
notebooks-map
::
++ notebook-short-json
|= book=notebook
@ -170,6 +157,7 @@
num-comments+(numb ~(wyt by comments.note))
comments+(comments-page comments.note 0 50)
read+b+read.note
pending+b+pending.note
==
::
++ notes-by-date
@ -197,6 +185,7 @@
num-comments+(numb ~(wyt by comments.note))
read+b+read.note
snippet+s+snippet.note
pending+b+pending.note
==
::
++ notes-page
@ -246,5 +235,6 @@
:~ author+s+(scot %p author.com)
date-created+(time date-created.com)
content+s+content.com
pending+b+pending.com
==
--


@ -44,7 +44,7 @@
%+ cook
|= [author=@ @ @ date-created=@da @ content=@t]
^- comment
[author date-created content]
[author date-created content %.n]
old-parser
==
--


@ -27,13 +27,24 @@
[%read who=@p book=@tas note=@tas]
==
::
+$ comment
+$ comment comment-3
::
+$ comment-2
$: author=@p
date-created=@da
content=@t
==
::
+$ note
+$ comment-3
$: author=@p
date-created=@da
content=@t
pending=?
==
::
+$ note note-3
::
+$ note-2
$: author=@p
title=@t
filename=@tas
@ -42,11 +53,37 @@
read=?
file=@t
snippet=@t
:: build=(each manx tang)
comments=(map @da comment)
comments=(map @da comment-2)
==
::
+$ notebook
+$ note-3
$: author=@p
title=@t
filename=@tas
date-created=@da
last-edit=@da
read=?
file=@t
snippet=@t
comments=(map @da comment)
pending=?
==
::
+$ notebook notebook-3
::
+$ notebook-2
$: title=@t
description=@t
comments=?
writers=path
subscribers=path
date-created=@da
notes=(map @tas note-2)
order=(list @tas)
unread=(set @tas)
==
::
+$ notebook-3
$: title=@t
description=@t
comments=?


@ -1493,10 +1493,12 @@
=| {l/(unit) r/(unit)}
|- ^- ?
?~ a &
?& ?~(l & (gor p.n.a u.l))
?~(r & (gor u.r p.n.a))
?~(l.a & ?&((mor p.n.a p.n.l.a) $(a l.a, l `p.n.a)))
?~(r.a & ?&((mor p.n.a p.n.r.a) $(a r.a, r `p.n.a)))
?& ?~(l & &((gor p.n.a u.l) !=(p.n.a u.l)))
?~(r & &((gor u.r p.n.a) !=(u.r p.n.a)))
?~ l.a &
&((mor p.n.a p.n.l.a) !=(p.n.a p.n.l.a) $(a l.a, l `p.n.a))
?~ r.a &
&((mor p.n.a p.n.r.a) !=(p.n.a p.n.r.a) $(a r.a, r `p.n.a))
==
::
++ gas :: concatenate


@ -5836,6 +5836,7 @@
|* wil/(pole fist)
|= jon/json
?. ?=({$a *} jon) ~
?. =((lent wil) (lent p.jon)) ~
=+ raw=((at-raw wil) p.jon)
?.((za raw) ~ (some (zp raw)))
::


@ -137,6 +137,9 @@
:: doesn't follow horizontal & vertical ordering
::
=/ unbalanced-e=(map @ @) [[1 1] [[3 3] ~ ~] [[2 2] ~ ~]]
:: has duplicate keys
::
=/ duplicates=(map @ @) [[1 1] [[1 2] ~ ~] ~]
;: weld
%+ expect-eq
!> [%b-a %.y]
@ -156,6 +159,9 @@
%+ expect-eq
!> [%u-e %.n]
!> [%u-e ~(apt by unbalanced-e)]
%+ expect-eq
!> [%dup %.n]
!> [%dup ~(apt by duplicates)]
==
::
:: Test bifurcation (i.e. splits map a into two, discarding -.a)


@ -0,0 +1,693 @@
/+ *test
=, format
|%
:: split a cord on newlines
::
++ test-to-wain
;: weld
:: basic usage
::
%+ expect-eq
!> ~['hello' 'world']
!> (to-wain 'hello\0aworld')
:: string with no newlines
::
%+ expect-eq
!> ~['hey']
!> (to-wain 'hey')
:: empty string works fine
::
%+ expect-eq
!> ~['']
!> (to-wain '')
:: leading/trailing/consecutive newlines all work fine
::
%+ expect-eq
!> ~['' 'hi' '' '' 'there' '']
!> (to-wain '\0ahi\0a\0a\0athere\0a')
==
:: join a list of lines (cords) into a single cord
::
++ test-of-wain
;: weld
:: basic usage
::
%+ expect-eq
!> 'hey\0athere\0aworld!'
!> (of-wain ~['hey' 'there' 'world!'])
:: empty list
::
%+ expect-eq
!> ''
!> (of-wain ~)
:: single list
::
%+ expect-eq
!> 'hey'
!> (of-wain ~['hey'])
:: list with empties
::
%+ expect-eq
!> 'hey\0a\0athere'
!> (of-wain ~['hey' '' 'there'])
==
:: join a list of lines (tapes) into a single cord.
::
:: Appends an extra newline - this matches unix conventions of a
:: trailing newline. Also see #1, #2
::
++ test-of-wall
;: weld
:: basic usage
::
%+ expect-eq
!> "hey\0athere\0aworld!\0a"
!> (of-wall ~["hey" "there" "world!"])
:: empty list
::
%+ expect-eq
!> ""
!> (of-wall ~)
:: single list
::
%+ expect-eq
!> "hey\0a"
!> (of-wall ~["hey"])
:: list with empties
::
%+ expect-eq
!> "hey\0a\0athere\0a"
!> (of-wall ~["hey" "" "there"])
==
:: encoding and decoding of beams <-> paths
:: (a beam is a fully-qualified file reference. ship, desk, version,
:: path)
::
++ test-beam
=/ b=beam [[p=~zod q=%home r=[%ud p=12]] s=/hoon/zuse/sys]
=/ p=path /~zod/home/12/sys/zuse/hoon
;: weld
:: proper encode
::
%+ expect-eq
!> p
!> (en-beam b)
:: proper decode
::
%+ expect-eq
!> (some b)
!> (de-beam p)
:: proper round trip
::
%+ expect-eq
!> (some b)
!> (de-beam (en-beam b))
:: path too short
::
%+ expect-eq
!> ~
!> (de-beam /~zod/home)
:: invalid ship
::
%+ expect-eq
!> ~
!> (de-beam /'~zodisok'/home/12/sys/zuse/hoon)
:: invalid desk
::
%+ expect-eq
!> ~
!> (de-beam /~zod/12/12/sys/zuse/hoon)
:: invalid case
::
%+ expect-eq
!> ~
!> (de-beam /~zod/home/~zod/sys/zuse/hoon)
==
:: example values used in test
::
++ ex
|%
++ nul `json`~
++ tru `json`[%b &]
++ num `json`[%n ~.12]
++ str `json`[%s 'hey']
++ frond `json`(frond:enjs 'foo' num)
++ obj `json`(pairs:enjs ~[['foo' num] ['bar' str]])
--
:: functions for creating `json` values
::
++ test-enjs
=, enjs
;: weld
:: numbers
::
%+ expect-eq
!> num:ex
!> (numb 12)
%+ expect-eq
!> num:ex
!> (numb 0xc)
%+ expect-eq
!> [%n '0']
!> (numb 0)
:: strings
::
%+ expect-eq
!> str:ex
!> (tape "hey")
%+ expect-eq
:: uses of-wall, so adds the trailing newline
::
!> [%s 'hi\0athere\0a']
!> (wall ~["hi" "there"])
:: objects
::
%+ expect-eq
!> [%o (molt ~[['foo' num:ex]])]
!> (frond 'foo' num:ex)
=+ props=~[['foo' num:ex] ['bar' tru:ex]]
%+ expect-eq
!> [%o (molt props)]
!> (pairs props)
:: time - stored as integer number of milliseconds since the unix epoch
::
%+ expect-eq
!> [%n '1000']
!> (time ~1970.1.1..0.0.1)
:: ship - store ship identity as a string
::
%+ expect-eq
!> [%s 'zod']
!> (ship ~zod)
==
:: dejs - recursive processing of `json` values
::
:: This version crashes when used on improper input. Prefer using
:: dejs-soft (also tested below) which returns units instead.
::
:: decoding from null, booleans, numbers, strings
::
++ test-dejs-primitives
=, dejs
;: weld
:: null
::
%+ expect-eq
!> ~
!> (ul `json`~)
:: booleans
::
:: bo extracts as-is, bu negates it
::
%+ expect-eq
!> &
!> (bo tru:ex)
%+ expect-eq
!> |
!> (bu tru:ex)
%- expect-fail
|. (bo num:ex)
%- expect-fail
|. (bu num:ex)
:: integers
::
:: as @
::
%+ expect-eq
!> 12
!> (ni num:ex)
%- expect-fail
|. (ni tru:ex)
:: as cord
::
%+ expect-eq
!> '12'
!> (no num:ex)
%- expect-fail
|. (no tru:ex)
:: timestamp - ms since the unix epoch
::
%+ expect-eq
!> ~1970.1.1..00.00.01
!> (di [%n ~.1000])
%- expect-fail
|. (di tru:ex)
:: strings
::
:: string as tape
::
%+ expect-eq
!> "hey"
!> (sa str:ex)
%- expect-fail
|. (sa tru:ex)
:: string as cord
::
%+ expect-eq
!> 'hey'
!> (so str:ex)
%- expect-fail
|. (so tru:ex)
:: string with custom parser
::
%+ expect-eq
!> ' '
!> ((su (just ' ')) [%s ' '])
%- expect-fail
|. ((su (just ' ')) tru:ex)
==
:: decoding arrays
::
++ test-dejs-arrays
=, dejs
;: weld
:: ar - as list
::
%+ expect-eq
!> ~[1 2 3]
!> ((ar ni) [%a ~[[%n '1'] [%n '2'] [%n '3']]])
%- expect-fail
|. ((ar ni) str:ex)
%- expect-fail
|. ((ar ni) [%a ~[str:ex]])
:: at - as tuple
::
:: handlers must match exactly
::
%+ expect-eq
!> [1 'hey']
!> ((at ~[ni so]) [%a ~[[%n '1'] [%s 'hey']]])
:: too few or many handlers crash
::
%- expect-fail
|. ((at ~[ni so]) [%a ~])
%- expect-fail
|. ((at ~[ni so]) [%a ~[[%n '1'] [%s 'hey'] [%b &]]])
:: a nested error will crash
::
%- expect-fail
|. ((at ~[ni]) [%a ~[[%s 'hey']]])
==
:: decoding objects
::
++ test-dejs-objects
=, dejs
;: weld
:: of - single-property objects
::
%+ expect-eq
!> ['foo' 12]
!> ((of ~[['foo' ni]]) frond:ex)
%+ expect-eq
!> ['foo' 12]
!> ((of ~[['bar' so] ['foo' ni]]) frond:ex)
%- expect-fail
:: the handler needs to apply properly to the value
::
|. ((of ~[['foo' ni]]) num:ex)
%- expect-fail
:: the key of the frond needs to exist in the handler list
::
|. ((of ~[['bar' so]]) frond:ex)
%- expect-fail
:: an object with multiple properties is an error
::
|. ((of ~[['bar' so] ['foo' ni]]) obj:ex)
:: ot - exact-shape objects to tuple
::
%+ expect-eq
!> [12 'hey']
!> ((ot ~[['foo' ni] ['bar' so]]) obj:ex)
%- expect-fail
:: it checks it's called on an actual object
::
|. ((ot ~[['foo' ni]]) num:ex)
%- expect-fail
:: missing property on the object
::
|. ((ot ~[['foo' ni] ['baz' so]]) obj:ex)
:: ou - object to tuple, with optional properties. value handlers
::
:: are passed (unit json)
::
%+ expect-eq
!> [12 14]
!> ((ou ~[['foo' (uf 14 ni)] ['baz' (uf 14 ni)]]) obj:ex)
:: om - simple object as map
::
%+ expect-eq
!> (molt ~[['foo' num:ex] ['bar' str:ex]])
!> ((om same) obj:ex)
:: op - object to map, but run a parsing function on the keys
::
%+ expect-eq
!> (molt ~[[12 num:ex] [14 str:ex]])
!> ((op dem same) (pairs:enjs ~[['12' num:ex] ['14' str:ex]]))
==
:: decoder transformers
::
++ test-dejs-transformers
=, dejs
;: weld
:: cu - decode, then transform
::
%+ expect-eq
!> 11
!> ((cu dec ni) [%n ~.12])
:: ci - decode, then assert a transformation succeeds
::
%+ expect-eq
!> 12
!> ((ci some ni) num:ex)
%- expect-fail
|. ((ci |=(* ~) ni) num:ex)
:: mu - decode if not null
::
%+ expect-eq
!> ~
!> ((mu ni) nul:ex)
%+ expect-eq
!> (some 12)
!> ((mu ni) num:ex)
:: pe - add prefix to decoded value
::
%+ expect-eq
!> ['a' 12]
!> ((pe 'a' ni) num:ex)
:: uf - defaults for empty (unit json)
::
%+ expect-eq
!> 'nah'
!> ((uf 'nah' ni) ~)
%+ expect-eq
!> 12
!> ((uf 'nah' ni) (some num:ex))
:: un - dangerous ensure a (unit json)
::
%+ expect-eq
!> 12
!> ((un ni) (some num:ex))
%- expect-fail
|. ((un ni) ~)
==
:: various unit/collection helpers
::
++ test-dejs-helpers
=, dejs
=+ all=`(list (unit @))`~[(some 1) (some 2) (some 3)]
=+ nall=`(list (unit @))`~[(some 1) ~ (some 3)]
;: weld
:: za - are all units in this list full?
::
%+ expect-eq
!> &
!> (za ~)
%+ expect-eq
!> &
!> (za all)
%+ expect-eq
!> |
!> (za nall)
:: zl - collapse (list (unit)) -> (unit (list))
::
%+ expect-eq
!> (some ~[1 2 3])
!> (zl all)
%+ expect-eq
!> ~
!> (zl nall)
%+ expect-eq
!> (some ~)
!> (zl ~)
:: zp - force unwrap a (list (unit)) as tuple
::
%+ expect-eq
!> [1 2 3]
!> (zp all)
%- expect-fail
|. (zp nall)
%- expect-fail
|. (zp ~)
:: zm - collapse a (map @tas (unit *)) -> (unit (map @tas *))
::
%+ expect-eq
!> (some (molt ~[['a' 1] ['b' 2]]))
!> (zm (molt ~[['a' (some 1)] ['b' (some 2)]]))
%+ expect-eq
!> ~
!> (zm (molt ~[['a' `(unit @)`(some 1)] ['b' ~]]))
%+ expect-eq
!> (some ~)
!> (zm ~)
==
::
:: dejs-soft recursive processing of `json` values
::
:: These functions return units, which will be nil if the input
:: doesn't match the defined structure.
::
++ test-dejs-soft-primitives
=, dejs-soft
;: weld
:: null
::
%+ expect-eq
!> `~
!> (ul `json`~)
:: booleans
::
:: bo extracts as-is, bu negates it
::
%+ expect-eq
!> `&
!> (bo tru:ex)
%+ expect-eq
!> `|
!> (bu tru:ex)
%+ expect-eq
!> ~
!> (bo num:ex)
%+ expect-eq
!> ~
!> (bu num:ex)
:: integers
:: as @
::
%+ expect-eq
!> `12
!> (ni num:ex)
%+ expect-eq
!> ~
!> (ni tru:ex)
:: as cord
::
%+ expect-eq
!> `'12'
!> (no num:ex)
%+ expect-eq
!> ~
!> (no tru:ex)
:: timestamp - ms since the unix epoch
::
%+ expect-eq
!> `~1970.1.1..00.00.01
!> (di [%n ~.1000])
%+ expect-eq
!> ~
!> (di tru:ex)
:: string as tape
::
%+ expect-eq
!> `"hey"
!> (sa str:ex)
%+ expect-eq
!> ~
!> (sa tru:ex)
:: string as cord
::
%+ expect-eq
!> `'hey'
!> (so str:ex)
%+ expect-eq
!> ~
!> (so tru:ex)
:: string with custom parser
::
%+ expect-eq
!> `' '
!> ((su (just ' ')) [%s ' '])
%+ expect-eq
!> ~
!> ((su (just ' ')) tru:ex)
==
:: decoding arrays
::
++ test-dejs-soft-arrays
=, dejs-soft
;: weld
:: ar - as list
::
%+ expect-eq
!> `~[1 2 3]
!> ((ar ni) [%a ~[[%n '1'] [%n '2'] [%n '3']]])
%+ expect-eq
!> ~
!> ((ar ni) str:ex)
%+ expect-eq
!> ~
!> ((ar ni) [%a ~[str:ex]])
:: at - as tuple
::
:: handlers must match exactly
::
%+ expect-eq
!> `[1 'hey']
!> ((at ~[ni so]) [%a ~[[%n '1'] [%s 'hey']]])
:: too few or many handlers won't match
::
%+ expect-eq
!> ~
!> ((at ~[ni so]) [%a ~])
%+ expect-eq
!> ~
!> ((at ~[ni so]) [%a ~[[%n '1'] [%s 'hey'] [%b &]]])
:: a nested failure to match will propagate upwards
::
%+ expect-eq
!> ~
!> ((at ~[ni]) [%a ~[[%s 'hey']]])
==
:: decoding objects
::
++ test-dejs-soft-objects
=, dejs-soft
;: weld
:: of - single-property objects
::
%+ expect-eq
!> `['foo' 12]
!> ((of ~[['foo' ni]]) frond:ex)
%+ expect-eq
!> `['foo' 12]
!> ((of ~[['bar' so] ['foo' ni]]) frond:ex)
%+ expect-eq
!> ~
:: the handler needs to apply properly to the value
::
!> ((of ~[['foo' ni]]) num:ex)
%+ expect-eq
!> ~
:: the key of the frond needs to exist in the handler list
::
!> ((of ~[['bar' so]]) frond:ex)
%+ expect-eq
!> ~
:: an object with multiple properties is an error
::
!> ((of ~[['bar' so] ['foo' ni]]) obj:ex)
:: ot - exact-shape objects to tuple
::
%+ expect-eq
!> `[12 'hey']
!> ((ot ~[['foo' ni] ['bar' so]]) obj:ex)
%+ expect-eq
!> ~
:: missing property on the object
::
!> ((ot ~[['foo' ni] ['baz' so]]) obj:ex)
:: om - simple object as map
::
%+ expect-eq
!> `(molt ~[['foo' num:ex] ['bar' str:ex]])
!> ((om some) obj:ex)
:: op - object to map, but run a parsing function on the keys
::
%+ expect-eq
!> `(molt ~[[12 num:ex] [14 str:ex]])
!> ((op dem some) (pairs:enjs ~[['12' num:ex] ['14' str:ex]]))
==
:: decoder transformers
::
++ test-dejs-soft-transformers
=, dejs-soft
;: weld
:: cu - decode, then transform
::
%+ expect-eq
!> `11
!> ((cu dec ni) [%n ~.12])
:: ci - decode, then transform, adapting the transformer to return a
:: unit
::
%+ expect-eq
!> `12
!> ((ci some ni) num:ex)
%+ expect-eq
!> ~
!> ((ci |=(* ~) ni) num:ex)
:: mu - decode if not null
::
%+ expect-eq
!> `~
!> ((mu ni) nul:ex)
%+ expect-eq
!> `(some 12)
!> ((mu ni) num:ex)
:: pe - add prefix to decoded value
::
%+ expect-eq
!> `['a' 12]
!> ((pe 'a' ni) num:ex)
==
:: various unit/collection helpers
::
++ test-dejs-soft-helpers
=, dejs-soft
=+ all=`(list (unit @))`~[(some 1) (some 2) (some 3)]
=+ nall=`(list (unit @))`~[(some 1) ~ (some 3)]
;: weld
:: za - are all units in this list full?
::
%+ expect-eq
!> &
!> (za ~)
%+ expect-eq
!> &
!> (za all)
%+ expect-eq
!> |
!> (za nall)
:: zl - collapse (list (unit)) -> (unit (list))
::
%+ expect-eq
!> (some ~[1 2 3])
!> (zl all)
%+ expect-eq
!> ~
!> (zl nall)
%+ expect-eq
!> (some ~)
!> (zl ~)
:: zp - force unwrap a (list (unit)) as tuple
::
%+ expect-eq
!> [1 2 3]
!> (zp all)
%- expect-fail
|. (zp nall)
%- expect-fail
|. (zp ~)
:: zm - collapse a (map @tas (unit *)) -> (unit (map @tas *))
::
%+ expect-eq
!> (some (molt ~[['a' 1] ['b' 2]]))
!> (zm (molt ~[['a' (some 1)] ['b' (some 2)]]))
%+ expect-eq
!> ~
!> (zm (molt ~[['a' `(unit @)`(some 1)] ['b' ~]]))
%+ expect-eq
!> (some ~)
!> (zm ~)
==
--


@ -1,13 +1,15 @@
:: tests for html
:: tests for html
::
/+ *test
=, html
=, de-xml:html
=, en-xml:html
|%
:: de-xml takes a cord but en-xml returns a tape?
:: de-xml takes a cord but en-xml returns a tape?
::
++ test-de-xml
;: weld
:: Basic use
:: Basic use
::
%+ expect-eq
!> ^- manx +:(de-xml:html (crip "<html><head><title>My first webpage</title></head><body><h1>Welcome!</h1>Hello, world! We are on the web.\0a<div></div><script src=\"http://unsafely.tracking.you/cookiemonster.js\"></script></body></html>"))
@ -23,14 +25,15 @@
;script(src "http://unsafely.tracking.you/cookiemonster.js");
==
==
:: CDATA sections
:: CDATA sections
::
%+ expect-eq
!> ^- manx
+:(de-xml:html (crip "<elem><![CDATA[text]]></elem>"))
!> ^- manx
;elem: text
:: comments
:: comments
::
%+ expect-eq
!> ^- manx
;elem: text
@ -39,7 +42,7 @@
!> ^- manx
;elem;
!> +:(de-xml:html (crip "<elem><!-- comment --></elem>"))
:: entities
:: entities
::
%+ expect-eq
!> ^- manx
@ -59,13 +62,13 @@
::
++ test-en-xml
;: weld
:: Entities
:: Entities
::
%+ expect-eq
!> "<elem>&gt;</elem>"
!> %- en-xml:html
;elem: >
:: Basic use
:: Basic use
::
%+ expect-eq
!> %- en-xml:html
@ -82,11 +85,788 @@
==
==
!> "<html><head><title>My first webpage</title></head><body><h1>Welcome!</h1>Hello, world!\0aWe are on the web.\0a<div></div><script src=\"http://unsafely.tracking.you/cookiemonster.js\"></script></body></html>"
:: Attributes
:: Attributes
::
%+ expect-eq
!> "<input type=\"submit\">Submit</input>"
!> %- en-xml:html
;input(type "submit"): Submit
==
:: JSON encoding/decoding
::
++ from-code-points
|= points=(list @)
(tufa `(list @c)`points)
++ from-code-point
|= point=@
(tuft point)
+$ json-parse-spec [name=tape input=cord expected=json]
+$ json-parse-rejection-spec [input=cord name=tape]
:: For checking a large list of examples against expected values.
:: It also nicely formats any failures.
::
++ run-parse-specs
:: legend tells of a man who made the Kessel run in less than 12
:: parse-specs...
|= specs=(list json-parse-spec)
%- zing
%- turn
:- specs
|= spec=json-parse-spec
^- tang
=+ result=(expect-eq !>(`expected.spec) !>((de-json:html input.spec)))
?~ result ~
`tang`[[%leaf "in {name.spec}:"] result]
:: Checks that a list of examples all fail to parse
::
++ run-parse-rejection-specs
|= specs=(list json-parse-rejection-spec)
%- zing
%- turn
:- specs
|= spec=json-parse-rejection-spec
^- tang
=+ result=(expect-eq !>(~) !>((de-json:html input.spec)))
?~ result ~
`tang`[[%leaf "in {name.spec}:"] result]
:: example values used in tests
::
++ ex
|%
++ two `json`[%n '2']
++ tru `json`[%b &]
--
:: encoding naked values
::
++ test-en-json-basics
;: weld
%+ expect-eq
!> "true"
!> (en-json [%b &])
%+ expect-eq
!> "false"
!> (en-json [%b |])
%+ expect-eq
!> "null"
!> (en-json ~)
%+ expect-eq
!> "123.45"
!> (en-json [%n '123.45'])
==
:: encoding strings, with proper escaping rules
::
++ test-en-json-strings
:: A less-confusing representation of these strings is included in comments
::
:: Things get confusing with hoon string literal escapes. The
:: version included as a comment is if you opened the json output
:: in a simple text editor.
::
;: weld
:: "hello"
::
%+ expect-eq
!> "\"hello\""
!> (en-json [%s 'hello'])
:: it escapes quotes
:: "he said \"wow\""
::
%+ expect-eq
!> "\"he said \\\"wow\\\"\""
!> (en-json [%s 'he said "wow"'])
:: it escapes backslashes
:: "Delete C:\\Windows\\System32"
::
%+ expect-eq
!> "\"Delete C:\\\\Windows\\\\System32\""
!> (en-json [%s 'Delete C:\\Windows\\System32'])
:: it uses \n for newlines
:: "hello\nworld"
::
%+ expect-eq
!> "\"hello\\nworld\""
!> (en-json [%s 'hello\0aworld'])
:: it uses \u encoding for control characters (0x1f and below)
:: "ding!\u0007"
::
%+ expect-eq
!> "\"ding!\\u0007\""
!> (en-json [%s 'ding!\07'])
:: it supports null bytes
::
%+ expect-eq
!> "\"null\\u0000byte\\u0000separator\""
!> (en-json [%s 'null\00byte\00separator'])
:: inline unicode characters
::
%+ expect-eq
!> "\"lmao 🤣\""
!> (en-json [%s 'lmao 🤣'])
==
:: encoding arrays
::
++ test-en-json-arrays
;: weld
:: empty array
::
%+ expect-eq
!> "[]"
!> (en-json [%a ~])
:: 1 element
::
%+ expect-eq
!> "[2]"
!> (en-json [%a ~[two:ex]])
:: multiple elements are comma-separated
::
%+ expect-eq
!> "[2,2,2]"
!> (en-json [%a ~[two:ex two:ex two:ex]])
==
:: encoding basic objects
::
++ test-en-json-objects
:: opening curly braces are escaped to avoid urbit string literal
:: interpolation
::
;: weld
:: empty object
::
%+ expect-eq
!> "\{}"
!> (en-json [%o ~])
:: one property
::
%+ expect-eq
!> "\{\"foo\":2}"
!> (en-json [%o (molt ~[['foo' two:ex]])])
:: multiple properties are comma-separated
::
%+ expect-eq
!> "\{\"foo\":2,\"bar\":true}"
!> (en-json [%o (molt ~[['foo' two:ex] ['bar' tru:ex]])])
:: object keys use same encoding logic as strings
::
%+ expect-eq
:: {"\u0007\"\n\\":true}
::
!> "\{\"\\u0007\\\"\\n\\\\\":true}"
!> (en-json [%o (molt ~[['\07"\0a\\' tru:ex]])])
==
:: object encoding stress-test
::
++ test-en-json-complex-structure
%+ expect-eq
:: [{}, 4, [[], [{foo: {"4": 4, "true": true}}]]]
::
!> "[\{},4,[[],[\{\"foo\":\{\"4\":4,\"true\":true}}]]]"
!> %- en-json:html
:- %a
:~ [%o ~]
[%n '4']
:- %a
:~ [%a ~]
:- %a
:~ %+ frond:enjs:format
'foo'
(pairs:enjs:format ~[['4' [%n '4']] ['true' [%b &]]])
==
==
==
:: decoding naked values
::
++ test-de-json-simple-values
=, html
;: weld
%+ expect-eq
!> `~
!> (de-json 'null')
%+ expect-eq
!> `[%b &]
!> (de-json 'true')
%+ expect-eq
!> `[%b |]
!> (de-json 'false')
==
:: The following parser test suite (test-de-json-bad-examples and
:: test-en-json-suite) is adapted from https://github.com/nst/JSONTestSuite/
:: (Copyright (c) 2016 Nicolas Seriot) under the terms of the MIT license.
::
:: These are all inputs that should be rejected by a valid json parser
::
++ test-de-json-bad-examples
%- run-parse-rejection-specs
:~
['[1 true]' "n_array_1_true_without_comma"]
['[aÂ]' "n_array_a_invalid_utf8"]
['["": 1]' "n_array_colon_instead_of_comma"]
['[""],' "n_array_comma_after_close"]
['[,1]' "n_array_comma_and_number"]
['[1,,2]' "n_array_double_comma"]
['["x",,]' "n_array_double_extra_comma"]
['["x"]]' "n_array_extra_close"]
['["",]' "n_array_extra_comma"]
['["x"' "n_array_incomplete"]
['[x' "n_array_incomplete_invalid_value"]
['[3[4]]' "n_array_inner_array_no_comma"]
['[ˇ]' "n_array_invalid_utf8"]
['[1:2]' "n_array_items_separated_by_semicolon"]
['[,]' "n_array_just_comma"]
['[-]' "n_array_just_minus"]
['[ , ""]' "n_array_missing_value"]
['["a",\0a4\0a,1,' "n_array_newlines_unclosed"]
['[1,]' "n_array_number_and_comma"]
['[1,,]' "n_array_number_and_several_commas"]
['["\0b"\\f]' "n_array_spaces_vertical_tab_formfeed"]
['[*]' "n_array_star_inside"]
['[""' "n_array_unclosed"]
['[1,' "n_array_unclosed_trailing_comma"]
['[1,\0a1\0a,1' "n_array_unclosed_with_new_lines"]
['[{}' "n_array_unclosed_with_object_inside"]
['[fals]' "n_incomplete_false"]
['[nul]' "n_incomplete_null"]
['[tru]' "n_incomplete_true"]
['[++1234]' "n_number_++"]
['[+1]' "n_number_+1"]
['[+Inf]' "n_number_+Inf"]
['[-01]' "n_number_-01"]
['[-1.0.]' "n_number_-1.0."]
['[-NaN]' "n_number_-NaN"]
['[.-1]' "n_number_.-1"]
['[.2e-3]' "n_number_.2e-3"]
['[0.1.2]' "n_number_0.1.2"]
['[1 000.0]' "n_number_1_000"]
['[1eE2]' "n_number_1eE2"]
['[Inf]' "n_number_Inf"]
['[NaN]' "n_number_NaN"]
['[1]' "n_number_U+FF11_fullwidth_digit_one"]
['[1+2]' "n_number_expression"]
['[0x1]' "n_number_hex_1_digit"]
['[0x42]' "n_number_hex_2_digits"]
['[Infinity]' "n_number_infinity"]
['[0e+-1]' "n_number_invalid+-"]
['[-123.123foo]' "n_number_invalid-negative-real"]
['[123Â]' "n_number_invalid-utf-8-in-bigger-int"]
['[1e1Â]' "n_number_invalid-utf-8-in-exponent"]
['[0Â]' "n_number_invalid-utf-8-in-int"]
['[-Infinity]' "n_number_minus_infinity"]
['[-foo]' "n_number_minus_sign_with_trailing_garbage"]
['[- 1]' "n_number_minus_space_1"]
['[-012]' "n_number_neg_int_starting_with_zero"]
['[-.123]' "n_number_neg_real_without_int_part"]
['[-1x]' "n_number_neg_with_garbage_at_end"]
['[1ea]' "n_number_real_garbage_after_e"]
['[1eÂ]' "n_number_real_with_invalid_utf8_after_e"]
['[.123]' "n_number_starting_with_dot"]
['[1.2a-3]' "n_number_with_alpha"]
['[1.8011670033376514H-308]' "n_number_with_alpha_char"]
['[012]' "n_number_with_leading_zero"]
['["x", truth]' "n_object_bad_value"]
['{[: "x"}' "n_object_bracket_key"]
['{"x", null}' "n_object_comma_instead_of_colon"]
['{"x"::"b"}' "n_object_double_colon"]
['{🇨🇭}' "n_object_emoji"]
['{"a":"a" 123}' "n_object_garbage_at_end"]
['{key: \'value\'}' "n_object_key_with_single_quotes"]
['{"π":"0",}' "n_object_lone_continuation_byte_in_key_and_trailing_comma"]
['{"a" b}' "n_object_missing_colon"]
['{:"b"}' "n_object_missing_key"]
['{"a" "b"}' "n_object_missing_semicolon"]
['{"a":' "n_object_missing_value"]
['{"a"' "n_object_no-colon"]
['{1:1}' "n_object_non_string_key"]
['{9999E9999:1}' "n_object_non_string_key_but_huge_number_instead"]
['{null:null,null:null}' "n_object_repeated_null_null"]
['{"id":0,,,,,}' "n_object_several_trailing_commas"]
['{\'a\':0}' "n_object_single_quote"]
['{"id":0,}' "n_object_trailing_comma"]
['{"a":"b"}/**/' "n_object_trailing_comment"]
['{"a":"b"}/**//' "n_object_trailing_comment_open"]
['{"a":"b"}//' "n_object_trailing_comment_slash_open"]
['{"a":"b"}/' "n_object_trailing_comment_slash_open_incomplete"]
['{"a":"b",,"c":"d"}' "n_object_two_commas_in_a_row"]
['{a: "b"}' "n_object_unquoted_key"]
['{"a":"a' "n_object_unterminated-value"]
['{ "foo" : "bar", "a" }' "n_object_with_single_string"]
['{"a":"b"}#' "n_object_with_trailing_garbage"]
[' ' "n_single_space"]
['["\\uD800\\"]' "n_string_1_surrogate_then_escape"]
['["\\uD800\\u"]' "n_string_1_surrogate_then_escape_u"]
['["\\uD800\\u1"]' "n_string_1_surrogate_then_escape_u1"]
['["\\uD800\\u1x"]' "n_string_1_surrogate_then_escape_u1x"]
['[é]' "n_string_accentuated_char_no_quotes"]
['["\\x00"]' "n_string_escape_x"]
['["\\\\\\"]' "n_string_escaped_backslash_bad"]
['["\\\09"]' "n_string_escaped_ctrl_char_tab"]
['["\\🌀"]' "n_string_escaped_emoji"]
['["\\"]' "n_string_incomplete_escape"]
['["\\u00A"]' "n_string_incomplete_escaped_character"]
['["\\uD834\\uDd"]' "n_string_incomplete_surrogate"]
['["\\uD800\\uD800\\x"]' "n_string_incomplete_surrogate_escape_invalid"]
['["\\uÂ"]' "n_string_invalid-utf-8-in-escape"]
['["\\a"]' "n_string_invalid_backslash_esc"]
['["\\uqqqq"]' "n_string_invalid_unicode_escape"]
['["\\Â"]' "n_string_invalid_utf8_after_escape"]
['[\\u0020"asd"]' "n_string_leading_uescaped_thinspace"]
['[\\n]' "n_string_no_quotes_with_bad_escape"]
['"' "n_string_single_doublequote"]
['[\'single quote\']' "n_string_single_quote"]
['abc' "n_string_single_string_no_double_quotes"]
['["\\' "n_string_start_escape_unclosed"]
['["new' "n_string_unescaped_newline"]
['line"]' "n_string_unescaped_newline"]
['["\09"]' "n_string_unescaped_tab"]
['"\\UA66D"' "n_string_unicode_CapitalU"]
['""x' "n_string_with_trailing_garbage"]
['[⁠]' "n_structure_U+2060_word_joined"]
['Ôªø' "n_structure_UTF8_BOM_no_data"]
['<.>' "n_structure_angle_bracket_."]
['[<null>]' "n_structure_angle_bracket_null"]
['[1]x' "n_structure_array_trailing_garbage"]
['[1]]' "n_structure_array_with_extra_array_close"]
['["asd]' "n_structure_array_with_unclosed_string"]
['aå' "n_structure_ascii-unicode-identifier"]
['[True]' "n_structure_capitalized_True"]
['1]' "n_structure_close_unopened_array"]
['{"x": true,' "n_structure_comma_instead_of_closing_brace"]
['[][]' "n_structure_double_array"]
[']' "n_structure_end_array"]
['Ôª{}' "n_structure_incomplete_UTF8_BOM"]
['Â' "n_structure_lone-invalid-utf-8"]
['[' "n_structure_lone-open-bracket"]
['["a\00a"]' "n_string_unescaped_crtl_char"]
['["\\00"]' "n_string_backslash_00"]
==
:: TODO: de-json is accepting a slew of number formats it shouldn't.
::
:: Tracking issue here: https://github.com/urbit/urbit/issues/1775
:: Re-enable this test by removing the disable- prefix
::
++ disable-test-reject-invalid-numbers
%- run-parse-rejection-specs
:~
['123\00' "n_multidigit_number_then_00"]
['[1.]' "n_number_real_without_fractional_part"]
['[2.e+3]' "n_number_2.e+3"]
['[2.e-3]' "n_number_2.e-3"]
['[2.e3]' "n_number_2.e3"]
['[9.e+]' "n_number_9.e+"]
['[0.3e+]' "n_number_0.3e+"]
['[0.3e]' "n_number_0.3e"]
['[0.e1]' "n_number_0.e1"]
['[0E+]' "n_number_0_capital_E+"]
['[0E]' "n_number_0_capital_E"]
['[0e+]' "n_number_0e+"]
['[0e]' "n_number_0e"]
['[1.0e+]' "n_number_1.0e+"]
['[1.0e-]' "n_number_1.0e-"]
['[1.0e]' "n_number_1.0e"]
['[-2.]' "n_number_-2."]
==
:: these are all inputs that should be accepted by a valid parser
::
++ test-en-json-suite
=+ frond=frond:enjs:format
=+ pairs=pairs:enjs:format
%- run-parse-specs
:~
:* "y_array_arraysWithSpaces"
'[[] ]'
[%a ~[[%a ~]]]
==
:* "y_array_empty-string"
'[""]'
[%a ~[[%s '']]]
==
:* "y_array_empty"
'[]'
[%a ~]
==
:* "y_array_ending_with_newline"
'["a"]\0a'
[%a ~[[%s 'a']]]
==
:* "y_array_false"
'[false]'
[%a ~[[%b |]]]
==
:* "y_array_heterogeneous"
'[null, 1, "1", {}]'
[%a ~[~ [%n '1'] [%s '1'] [%o ~]]]
==
:* "y_array_null"
'[null]'
[%a ~[~]]
==
:* "y_array_with_1_and_newline"
'[1\0a]'
[%a ~[[%n '1']]]
==
:* "y_array_with_leading_space"
' [1]'
[%a ~[[%n '1']]]
==
:* "y_array_with_several_null"
'[1,null,null,null,2]'
[%a ~[[%n '1'] ~ ~ ~ [%n '2']]]
==
:* "y_array_with_trailing_space"
'[2] '
[%a ~[[%n '2']]]
==
:* "y_number"
'[123e65]'
[%a ~[[%n '123e65']]]
==
:* "y_number_0e+1"
'[0e+1]'
[%a ~[[%n '0e+1']]]
==
:* "y_number_0e1"
'[0e1]'
[%a ~[[%n '0e1']]]
==
:* "y_number_after_space"
'[ 4]'
[%a ~[[%n '4']]]
==
:* "y_number_double_close_to_zero"
'[-0.0000000000000000000000000000000000000000000000000000000\
/00000000000000000000001]'
[%a ~[[%n '-0.0000000000000000000000000000000000000000000000\
/00000000000000000000000000000001']]]
==
:* "y_number_int_with_exp"
'[20e1]'
[%a ~[[%n '20e1']]]
==
:* "y_number_minus_zero"
'[-0]'
[%a ~[[%n '-0']]]
==
:* "y_number_negative_int"
'[-123]'
[%a ~[[%n '-123']]]
==
:* "y_number_negative_one"
'[-1]'
[%a ~[[%n '-1']]]
==
:* "y_number_negative_zero"
'[-0]'
[%a ~[[%n '-0']]]
==
:* "y_number_real_capital_e"
'[1E22]'
[%a ~[[%n '1E22']]]
==
:* "y_number_real_capital_e_neg_exp"
'[1E-2]'
[%a ~[[%n '1E-2']]]
==
:* "y_number_real_capital_e_pos_exp"
'[1E+2]'
[%a ~[[%n '1E+2']]]
==
:* "y_number_real_exponent"
'[123e45]'
[%a ~[[%n '123e45']]]
==
:* "y_number_real_fraction_exponent"
'[123.456e78]'
[%a ~[[%n '123.456e78']]]
==
:* "y_number_real_neg_exp"
'[1e-2]'
[%a ~[[%n '1e-2']]]
==
:* "y_number_real_pos_exponent"
'[1e+2]'
[%a ~[[%n '1e+2']]]
==
:* "y_number_simple_int"
'[123]'
[%a ~[[%n '123']]]
==
:* "y_number_simple_real"
'[123.456789]'
[%a ~[[%n '123.456789']]]
==
:* "y_object"
'{"asd":"sdf", "dfg":"fgh"}'
(pairs ~[['asd' [%s 'sdf']] ['dfg' [%s ['fgh']]]])
==
:* "y_object_basic"
'{"asd":"sdf"}'
(frond ['asd' [%s 'sdf']])
==
:: duplicated keys, it takes the latest one.
::
:* "y_object_duplicated_key"
'{"a":"b","a":"c"}'
(frond ['a' [%s 'c']])
==
:* "y_object_duplicated_key_and_value"
'{"a":"b","a":"b"}'
(frond ['a' [%s 'b']])
==
:* "y_object_empty"
'{}'
[%o ~]
==
:* "y_object_empty_key"
'{"":0}'
(frond ['' [%n '0']])
==
:* "y_object_extreme_numbers"
'{ "min": -1.0e+28, "max": 1.0e+28 }'
(pairs ~[['min' [%n '-1.0e+28']] ['max' [%n '1.0e+28']]])
==
=/ long=@t 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
:* "y_object_long_strings"
'{"x":[{"id": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"}], \
/"id": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"}'
(pairs ~[['id' [%s long]] ['x' [%a ~[(frond ['id' [%s long]])]]]])
==
:* "y_object_simple"
'{"a":[]}'
(frond 'a' [%a ~])
==
:* "y_object_string_unicode"
'{"title":"\\u041f\\u043e\\u043b\\u0442\\u043e\\u0440\\u0430 \
/\\u0417\\u0435\\u043c\\u043b\\u0435\\u043a\\u043e\\u043f\\u0430" }'
(frond 'title' [%s 'Полтора Землекопа'])
==
:* "y_object_with_newlines"
'{\0a"a": "b"\0a}'
(frond 'a' [%s 'b'])
==
:* "y_string_allowed_escapes"
'["\\"\\\\\\/\\b\\f\\n\\r\\t"]'
[%a ~[[%s '"\\/\08\0c\0a\0d\09']]]
==
:* "y_string_backslash_and_u_escaped_zero"
'["\\\\u0000"]'
[%a ~[[%s '\\u0000']]]
==
:* "y_string_backslash_doublequotes"
'["\\""]'
[%a ~[[%s '"']]]
==
:* "y_string_comments"
'["a/*b*/c/*d//e"]'
[%a ~[[%s 'a/*b*/c/*d//e']]]
==
:* "y_string_double_escape_a"
'["\\\\a"]'
[%a ~[[%s '\\a']]]
==
:* "y_string_double_escape_n"
'["\\\\n"]'
[%a ~[[%s '\\n']]]
==
:* "y_string_escaped_control_character"
'["\\u0012"]'
[%a ~[[%s '\12']]]
==
:* "y_string_in_array_with_leading_space"
'[ "asd"]'
[%a ~[[%s 'asd']]]
==
:* "y_string_nonCharacterInUTF-8_U+10FFFF"
'["􏿿"]'
[%a ~[[%s (from-code-point 0x10.ffff)]]]
==
:* "y_string_nonCharacterInUTF-8_U+FFFF"
'["￿"]'
[%a ~[[%s (from-code-point 0xffff)]]]
==
:* "y_string_null_escape"
'["\\u0000"]'
[%a ~[[%s '\00']]]
==
:* "y_string_one-byte-utf-8"
'["\\u002c"]'
[%a ~[[%s '\2c']]]
==
:* "y_string_pi"
'["π"]'
[%a ~[[%s 'π']]]
==
:* "y_string_reservedCharacterInUTF-8_U+1BFFF"
'["𛿿"]'
[%a ~[[%s (from-code-point 0x1.bfff)]]]
==
:* "y_string_simple_ascii"
'["asd "]'
[%a ~[[%s 'asd ']]]
==
:* "y_string_space"
'" "'
[%s ' ']
==
:* "y_string_three-byte-utf-8"
'["\\u0821"]'
[%a ~[[%s (from-code-point 0x821)]]]
==
:* "y_string_two-byte-utf-8"
'["\\u0123"]'
[%a ~[[%s (from-code-point 0x123)]]]
==
:* "y_string_u+2028_line_sep"
'[""]'
[%a ~[[%s (from-code-point 0x2028)]]]
==
:* "y_string_u+2029_par_sep"
'[""]'
[%a ~[[%s (from-code-point 0x2029)]]]
==
:* "y_string_unicode_2"
'["⍂㈴⍂"]'
[%a ~[[%s '⍂㈴⍂']]]
==
:* "y_string_unicode_U+2064_invisible_plus"
'["\\u2064"]'
[%a ~[[%s (from-code-point 0x2064)]]]
==
:* "y_string_unicode_escaped_double_quote"
'["\\u0022"]'
[%a ~[[%s (from-code-point 0x22)]]]
==
:* "y_string_utf8"
'["€𝄞"]'
[%a ~[[%s '€𝄞']]]
==
:* "y_structure_lonely_false"
'false'
[%b |]
==
:* "y_structure_lonely_int"
'42'
[%n '42']
==
:* "y_structure_lonely_negative_real"
'-0.1'
[%n '-0.1']
==
:* "y_structure_lonely_null"
'null'
~
==
:* "y_structure_lonely_string"
'"asd"'
[%s 'asd']
==
:* "y_structure_lonely_true"
'true'
[%b &]
==
:* "y_structure_string_empty"
'""'
[%s '']
==
:* "y_structure_trailing_newline"
'["a"]\0a'
[%a ~[[%s 'a']]]
==
:* "y_structure_true_in_array"
'[true]'
[%a ~[[%b &]]]
==
:* "y_structure_whitespace_array"
' [] '
[%a ~]
==
==
:: TODO: de-json is rejecting or dropping unicode escape sequences
::
:: Tracking issue here: https://github.com/urbit/urbit/issues/1776
:: Re-enable this test by removing the disable- prefix
::
++ disable-test-parse-unicode-escape-sequences
=+ frond=frond:enjs:format
=+ pairs=pairs:enjs:format
%- run-parse-specs
:~
:* "y_string_with_del_character"
'["a\7fa"]'
[%a ~[[%s 'a\7fa']]]
==
:* "y_string_unicode_U+FDD0_nonchar"
'["\\uFDD0"]'
[%a ~[[%s (from-code-point 0xfdd0)]]]
==
:* "y_string_unicode_U+FFFE_nonchar"
'["\\uFFFE"]'
[%a ~[[%s (from-code-point 0xfffe)]]]
==
:* "y_string_unicode_U+10FFFE_nonchar"
'["\\uDBFF\\uDFFE"]'
[%a ~[[%s (crip (from-code-points ~[0xdbff 0xdffe]))]]]
==
:* "y_string_unicode_U+1FFFE_nonchar"
'["\\uD83F\\uDFFE"]'
[%a ~[[%s (crip (from-code-points ~[0xd83f 0xdffe]))]]]
==
:* "y_string_unicode_U+200B_ZERO_WIDTH_SPACE"
'["\\u200B"]'
[%a ~[[%s (from-code-point 0x200b)]]]
==
:* "y_string_uEscape"
'["\\u0061\\u30af\\u30EA\\u30b9"]'
[%a ~[[%s (crip (from-code-points ~[0x61 0x30af 0x30ea 0x30b9]))]]]
==
:* "y_string_uescaped_newline"
'["new\\u000Aline"]'
[%a ~[[%s 'new\0aline']]]
==
:* "y_string_unescaped_char_delete"
'["\7f"]'
[%a ~[[%s '\7f']]]
==
:* "y_string_unicode"
'["\\uA66D"]'
[%a ~[[%s (from-code-point 0xa66d)]]]
==
:* "y_string_unicodeEscapedBackslash"
'["\\u005C"]'
[%a ~[[%s (from-code-point 0x5c)]]]
==
:* "y_string_surrogates_U+1D11E_MUSICAL_SYMBOL_G_CLEF"
'["\\uD834\\uDd1e"]'
[%a ~[[%s (crip (from-code-points ~[0xd834 0xdd1e]))]]]
==
:* "y_string_last_surrogates_1_and_2"
'["\\uDBFF\\uDFFF"]'
[%a ~[[%s (crip (from-code-points ~[0xdbff 0xdfff]))]]]
==
:* "y_string_nbsp_uescaped"
'["new\\u00A0line"]'
[%a ~[[%s (crip "new{(from-code-points ~[0xa0])}line")]]]
==
:* "y_string_escaped_noncharacter"
'["\\uFFFF"]'
[%a ~[[%s (from-code-point 0xffff)]]]
==
:* "y_string_escaped_null"
'"foo\\u0000bar"'
[%s 'foo\00bar']
==
:* "y_object_escaped_null_in_key"
'{"foo\\u0000bar": 42}'
(frond ['foo\00bar' [%n '42']])
==
:* "y_string_1_2_3_bytes_UTF-8_sequences"
'["\\u0060\\u012a\\u12AB"]'
[%a ~[[%s '`Īካ']]]
==
:* "y_string_accepted_surrogate_pair"
'["\\uD801\\udc37"]'
[%a ~[[%s '𐐷']]]
==
:* "y_string_accepted_surrogate_pairs"
'["\\ud83d\\ude39\\ud83d\\udc8d"]'
[%a ~[[%s '😹💍']]]
==
==
--


@ -76,8 +76,7 @@ export class ChatScreen extends Component {
props.history.push("/~chat");
} else if (
props.envelopes.length - prevProps.envelopes.length >=
40
props.envelopes.length >= prevProps.envelopes.length + 10
) {
this.hasAskedForMessages = false;
if (prevProps.envelopes.length <= 20) {
@ -106,24 +105,19 @@ export class ChatScreen extends Component {
}
if (
state.numPages * 100 >= props.length ||
props.envelopes.length >= props.length ||
this.hasAskedForMessages ||
props.length <= 0
) {
return;
}
let end = props.envelopes[0].number;
if (end > 0) {
let start = end - 400 > 0 ? end - 400 : 0;
if (start === 0 && end === 1) {
return;
}
let start =
props.length - props.envelopes[props.envelopes.length - 1].number;
if (start > 0) {
let end = start + 300 < props.length ? start + 300 : props.length;
this.hasAskedForMessages = true;
props.subscription.fetchMessages(start, end - 1, props.station);
props.subscription.fetchMessages(start + 1, end, props.station);
}
}
@ -193,33 +187,29 @@ export class ChatScreen extends Component {
const { props, state } = this;
let messages = props.envelopes.slice(0);
let lastMsgNum = messages.length > 0 ? messages.length : 0;
if (messages.length > 100 * state.numPages) {
messages = messages.slice(
messages.length - 100 * state.numPages,
messages.length
);
messages = messages.slice(0, 100 * state.numPages);
}
let pendingMessages = props.pendingMessages.has(props.station)
? props.pendingMessages.get(props.station)
? props.pendingMessages.get(props.station).reverse()
: [];
pendingMessages.map(function (value) {
return (value.pending = true);
});
let reversedMessages = messages.concat(pendingMessages);
reversedMessages = reversedMessages.reverse();
reversedMessages = reversedMessages.map((msg, i) => {
let messageElements = pendingMessages.concat(messages).map((msg, i) => {
// Render sigil if previous message is not by the same sender
let aut = ["author"];
let renderSigil =
_.get(reversedMessages[i + 1], aut) !==
_.get(messages[i + 1], aut) !==
_.get(msg, aut, msg.author);
let paddingTop = renderSigil;
let paddingBot =
_.get(reversedMessages[i - 1], aut) !==
_.get(messages[i - 1], aut) !==
_.get(msg, aut, msg.author);
return (
@ -248,7 +238,7 @@ export class ChatScreen extends Component {
}}></div>
{(
!(props.station in props.chatSynced) &&
(reversedMessages.length > 0)
(messages.length > 0)
) ? (
<ResubscribeElement
api={props.api}
@ -256,7 +246,7 @@ export class ChatScreen extends Component {
station={props.station} />
) : (<div />)
}
{reversedMessages}
{messageElements}
</div>
</div>
)}
@ -272,7 +262,7 @@ export class ChatScreen extends Component {
}}></div>
{(
!(props.station in props.chatSynced) &&
(reversedMessages.length > 0)
(messages.length > 0)
) ? (
<ResubscribeElement
api={props.api}
@ -280,7 +270,7 @@ export class ChatScreen extends Component {
station={props.station} />
) : (<div />)
}
{reversedMessages}
{messageElements}
</div>
)}
}
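
A minimal sketch (not part of the commit; the helper name is hypothetical) of the fetch-window arithmetic the updated askForMessages branch above relies on, assuming envelopes are held newest-first, offset 0 is the newest message, and envelope numbers count up from 1 so the newest message is numbered length:

// Hypothetical standalone helper: compute the next backlog window to fetch.
function nextFetchWindow(envelopes, length) {
  if (envelopes.length === 0 || length <= 0) {
    return null;
  }
  // With newest-first ordering, the last loaded envelope is the oldest one.
  const oldestLoaded = envelopes[envelopes.length - 1].number;
  // Its offset from the newest message; everything past it is still unloaded.
  const start = length - oldestLoaded;
  if (start <= 0) {
    return null; // the whole backlog is already loaded
  }
  // Ask for up to 300 older messages, clamped to the total length.
  const end = start + 300 < length ? start + 300 : length;
  return { start: start + 1, end };
}

Under these assumptions the window covers the messages immediately older than what is already loaded.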


@ -88,11 +88,11 @@ export class InviteSearch extends Component {
this.setState({ searchValue: event.target.value });
if (searchTerm.length < 2) {
if (searchTerm.length < 1) {
this.setState({ searchResults: { groups: [], ships: [] } });
}
if (searchTerm.length > 2) {
if (searchTerm.length > 0) {
if (this.state.inviteError === true) {
this.setState({ inviteError: false });
}
@ -100,7 +100,7 @@ export class InviteSearch extends Component {
let groupMatches = [];
if (this.props.groupResults) {
groupMatches = this.state.groups.filter(e => {
return (e[0].includes(searchTerm) || e[1].includes(searchTerm));
return (e[0].includes(searchTerm) || e[1].toLowerCase().includes(searchTerm));
});
}
@ -127,8 +127,8 @@ export class InviteSearch extends Component {
isValid = false;
}
if (shipMatches.length === 0 && isValid) {
shipMatches.push(searchTerm);
if (isValid && shipMatches.findIndex(s => s === searchTerm) < 0) {
shipMatches.unshift(searchTerm);
}
}
@ -141,6 +141,15 @@ export class InviteSearch extends Component {
this.setState({ selected: newSelection })
}
if(searchTerm.length < 3) {
groupMatches = groupMatches.filter(([, name]) =>
name.toLowerCase().split(' ').some(s => s.startsWith(searchTerm))
).sort((a,b) => a[1].length - b[1].length);
shipMatches = shipMatches.slice(0,3);
}
this.setState({
searchResults: { groups: groupMatches, ships: shipMatches }
});


@ -17,7 +17,7 @@ export class ChatUpdateReducer {
message(json, state) {
let data = _.get(json, 'message', false);
if (data) {
state.inbox[data.path].envelopes.push(data.envelope);
state.inbox[data.path].envelopes.unshift(data.envelope);
state.inbox[data.path].config.length
= state.inbox[data.path].config.length + 1;
}
@ -27,9 +27,7 @@ export class ChatUpdateReducer {
let data = _.get(json, 'messages', false);
if (data) {
state.inbox[data.path].envelopes =
data.envelopes.concat(state.inbox[data.path].envelopes);
state.inbox[data.path].config.length =
state.inbox[data.path].config.length + data.envelopes.length;
state.inbox[data.path].envelopes.concat(data.envelopes);
}
}
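
A hypothetical standalone illustration (not part of the commit; helper names are invented) of the newest-first invariant the two reducer branches above maintain: a single incoming message is put at the front, while a page of backlog messages is appended after what is already loaded:

// mailbox is assumed to look like { envelopes: [...], config: { length: n } }.
function applyMessage(mailbox, envelope) {
  mailbox.envelopes.unshift(envelope); // newest message goes first
  mailbox.config.length += 1;
  return mailbox;
}

function applyBacklog(mailbox, olderEnvelopes) {
  // backlog pages attach to the end, after the already-loaded messages
  mailbox.envelopes = mailbox.envelopes.concat(olderEnvelopes);
  return mailbox;
}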


@ -89,11 +89,11 @@ export class InviteSearch extends Component {
this.setState({ searchValue: event.target.value });
if (searchTerm.length < 2) {
if (searchTerm.length < 1) {
this.setState({ searchResults: { groups: [], ships: [] } });
}
if (searchTerm.length > 2) {
if (searchTerm.length > 0) {
if (this.state.inviteError === true) {
this.setState({ inviteError: false });
}
@ -101,7 +101,7 @@ export class InviteSearch extends Component {
let groupMatches = [];
if (this.props.groupResults) {
groupMatches = this.state.groups.filter(e => {
return e[0].includes(searchTerm) || e[1].includes(searchTerm);
return e[0].includes(searchTerm) || e[1].toLowerCase().includes(searchTerm);
});
}
@ -128,8 +128,8 @@ export class InviteSearch extends Component {
isValid = false;
}
if (shipMatches.length === 0 && isValid) {
shipMatches.push(searchTerm);
if (isValid && shipMatches.findIndex(s => s === searchTerm) < 0) {
shipMatches.unshift(searchTerm);
}
}
@ -142,6 +142,15 @@ export class InviteSearch extends Component {
this.setState({ selected: newSelection })
}
if(searchTerm.length < 3) {
groupMatches = groupMatches.filter(([, name]) =>
name.toLowerCase().split(' ').some(s => s.startsWith(searchTerm))
).sort((a,b) => a[1].length - b[1].length);
shipMatches = shipMatches.slice(0,3);
}
this.setState({
searchResults: { groups: groupMatches, ships: shipMatches }
});


@ -89,11 +89,11 @@ export class InviteSearch extends Component {
this.setState({ searchValue: event.target.value });
if (searchTerm.length < 2) {
if (searchTerm.length < 1) {
this.setState({ searchResults: { groups: [], ships: [] } });
}
if (searchTerm.length > 2) {
if (searchTerm.length > 0) {
if (this.state.inviteError === true) {
this.setState({ inviteError: false });
}
@ -101,7 +101,7 @@ export class InviteSearch extends Component {
let groupMatches = [];
if (this.props.groupResults) {
groupMatches = this.state.groups.filter(e => {
return e[0].includes(searchTerm) || e[1].includes(searchTerm);
return e[0].includes(searchTerm) || e[1].toLowerCase().includes(searchTerm);
});
}
@ -128,8 +128,8 @@ export class InviteSearch extends Component {
isValid = false;
}
if (shipMatches.length === 0 && isValid) {
shipMatches.push(searchTerm);
if (isValid && shipMatches.findIndex(s => s === searchTerm) < 0) {
shipMatches.unshift(searchTerm);
}
}
@ -142,6 +142,14 @@ export class InviteSearch extends Component {
this.setState({ selected: newSelection })
}
if(searchTerm.length < 3) {
groupMatches = groupMatches.filter(([, name]) =>
name.toLowerCase().split(' ').some(s => s.startsWith(searchTerm))
).sort((a,b) => a[1].length - b[1].length);
shipMatches = shipMatches.slice(0,3);
}
this.setState({
searchResults: { groups: groupMatches, ships: shipMatches }
});


@ -3764,6 +3764,11 @@
"resolved": "https://registry.npmjs.org/moment/-/moment-2.24.0.tgz",
"integrity": "sha512-bV7f+6l2QigeBBZSM/6yTNq4P2fNpSWj/0e7jQcy87A8e7o2nAfP/34/2ky5Vw4B9S446EtIhodAzkFCcR4dQg=="
},
"mousetrap": {
"version": "1.6.5",
"resolved": "https://registry.npmjs.org/mousetrap/-/mousetrap-1.6.5.tgz",
"integrity": "sha512-QNo4kEepaIBwiT8CDhP98umTetp+JNfQYBWvC1pc6/OAibuXtRcxZ58Qz8skvEHYvURne/7R8T5VoOI7rDsEUA=="
},
"ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",


@ -32,6 +32,7 @@
"del": "^5.1.0",
"lodash": "^4.17.11",
"moment": "^2.20.1",
"mousetrap": "^1.6.5",
"react": "^16.5.2",
"react-codemirror2": "^6.0.0",
"react-dom": "^16.8.6",


@ -89,11 +89,11 @@ export class InviteSearch extends Component {
this.setState({ searchValue: event.target.value });
if (searchTerm.length < 2) {
if (searchTerm.length < 1) {
this.setState({ searchResults: { groups: [], ships: [] } });
}
if (searchTerm.length > 2) {
if (searchTerm.length > 0) {
if (this.state.inviteError === true) {
this.setState({ inviteError: false });
}
@ -101,7 +101,7 @@ export class InviteSearch extends Component {
let groupMatches = [];
if (this.props.groupResults) {
groupMatches = this.state.groups.filter(e => {
return e[0].includes(searchTerm) || e[1].includes(searchTerm);
return e[0].includes(searchTerm) || e[1].toLowerCase().includes(searchTerm);
});
}
@ -128,8 +128,8 @@ export class InviteSearch extends Component {
isValid = false;
}
if (shipMatches.length === 0 && isValid) {
shipMatches.push(searchTerm);
if (isValid && shipMatches.findIndex(s => s === searchTerm) < 0) {
shipMatches.unshift(searchTerm);
}
}
@ -142,6 +142,14 @@ export class InviteSearch extends Component {
this.setState({ selected: newSelection })
}
if(searchTerm.length < 3) {
groupMatches = groupMatches.filter(([, name]) =>
name.toLowerCase().split(' ').some(s => s.startsWith(searchTerm))
).sort((a,b) => a[1].length - b[1].length);
shipMatches = shipMatches.slice(0,3);
}
this.setState({
searchResults: { groups: groupMatches, ships: shipMatches }
});


@ -60,18 +60,22 @@ export class PrimaryReducer {
let book = Object.keys(json[host])[0];
let noteId = json[host][book]["note-id"];
if (state.notebooks[host] && state.notebooks[host][book]) {
if (state.notebooks[host][book]["notes-by-date"]) {
state.notebooks[host][book]["notes-by-date"].unshift(noteId);
} else {
state.notebooks[host][book]["notes-by-date"] = [noteId];
}
if (state.notebooks[host][book].notes) {
if (state.notebooks[host][book].notes[noteId] &&
state.notebooks[host][book].notes[noteId].pending)
{
state.notebooks[host][book].notes[noteId].pending = false;
return;
}
if (state.notebooks[host][book]["notes-by-date"]) {
state.notebooks[host][book]["notes-by-date"].unshift(noteId);
} else {
state.notebooks[host][book]["notes-by-date"] = [noteId];
}
state.notebooks[host][book].notes[noteId] = json[host][book];
} else {
state.notebooks[host][book].notes = {[noteId]: json[host][book]};
}
state.notebooks[host][book]["num-notes"] += 1;
if (!json[host][book].read) {
state.notebooks[host][book]["num-unread"] += 1;
@ -79,7 +83,6 @@ export class PrimaryReducer {
let prevNoteId = state.notebooks[host][book]["notes-by-date"][1] || null;
state.notebooks[host][book].notes[noteId]["prev-note"] = prevNoteId
state.notebooks[host][book].notes[noteId]["next-note"] = null;
if (state.notebooks[host][book].notes[prevNoteId]) {
state.notebooks[host][book].notes[prevNoteId]["next-note"] = noteId;
}
@ -96,10 +99,26 @@ export class PrimaryReducer {
state.notebooks[host][book].notes &&
state.notebooks[host][book].notes[note])
{
state.notebooks[host][book].notes[note]["num-comments"] += 1;
if (state.notebooks[host][book].notes[note].comments) {
state.notebooks[host][book].notes[note].comments.unshift(comment);
let limboCommentIdx =
_.findIndex(state.notebooks[host][book].notes[note].comments, (o) => {
let oldVal = o[Object.keys(o)[0]];
let newVal = comment[Object.keys(comment)[0]];
return (oldVal.pending &&
(oldVal.author === newVal.author) &&
(oldVal.content === newVal.content)
);
});
if (limboCommentIdx === -1) {
state.notebooks[host][book].notes[note]["num-comments"] += 1;
state.notebooks[host][book].notes[note].comments.unshift(comment);
} else {
state.notebooks[host][book].notes[note].comments[limboCommentIdx] =
comment;
}
} else if (state.notebooks[host][book].notes[note]["num-comments"] === 1) {
state.notebooks[host][book].notes[note]["num-comments"] += 1;
state.notebooks[host][book].notes[note].comments = [comment];
}
}