1
1
mirror of https://github.com/kanaka/mal.git synced 2024-08-18 02:00:40 +03:00

impls/bash: Minor changes to make it run under OSH

OSH is a bash-compatible shell: https://www.oilshell.org/

reader.sh:

- Put the constant regex pattern in a string literal.  This simplifies
  the pattern by removing the mix of regex quoting and shell quoting,
  and the implicit concatenation with $'\n'.

  This is suggested by the bash manual:
  https://www.gnu.org/software/bash/manual/bash.html#Conditional-Constructs

  "Storing the regular expression in a shell variable is often a useful
  way to avoid problems with quoting characters that are special to the
  shell."

- Initialize __reader_tokens as an array, not a string.

  https://www.oilshell.org/release/0.8.pre6/doc/known-differences.html#values-are-tagged-with-types-not-cells

env.sh:

Simplify quoting in 'eval' expressions.  This quotes associative array keys,
which is required by OSH to avoid dynamic parsing.

  https://www.oilshell.org/release/0.8.pre6/doc/known-differences.html#strings-vs-bare-words-in-array-indices
  http://www.oilshell.org/blog/2016/10/20.html

core.sh:

Quote associative array keys.  '<' and '>' are shell operators, and OSH
does not special-case them when they appear inside [] array subscripts.

----

With this change, OSH can run tests just like bash, e.g.:

$ osh impls/bash/stepA_mal.sh tests/step4_if_fn_do.mal

----

Test results are the same before and after this change:

$ NO_DOCKER=1 ./.travis_test.sh test bash

FAILURES:
SOFT FAILED TEST (line 295): (f (+ 1 1)) -> ['',true]:
    Expected : '.*\ntrue'
    Got      : '(f (+ 1 1))\nfalse'

TEST RESULTS (for ../tests/stepA_mal.mal):
    1: soft failing tests
    0: failing tests
  106: passing tests
  107: total tests
This commit is contained in:
Andy Chu 2020-06-13 10:57:13 -07:00
parent 15a11c067f
commit 02028e90a4
3 changed files with 10 additions and 8 deletions

View File

@ -376,10 +376,10 @@ declare -A core_ns=(
[readline]=readline
[read-string]=read_string
[slurp]=slurp
[<]=num_lt
[<=]=num_lte
[>]=num_gt
[>=]=num_gte
['<']=num_lt
['<=']=num_lte
['>']=num_gt
['>=']=num_gte
[+]=num_plus
[-]=num_minus
[__STAR__]=num_multiply

View File

@ -48,7 +48,7 @@ ENV_FIND () {
r="${1}"
else
local obj="${ANON["${1}"]}"
eval local outer="\${${obj}["__outer__"]}"
eval 'local outer=${'${obj}'["__outer__"]}'
if [[ "${outer}" && "${outer}" != "${__nil}" ]]; then
ENV_FIND "${outer}" "${2}"
else
@ -66,7 +66,7 @@ ENV_GET () {
local key="${ANON["${2}"]}"
if [[ "${r}" ]]; then
local obj="${ANON["${env}"]}"
eval r="\${${obj}["${key}"]}"
eval 'r=${'${obj}'["'${key}'"]}'
else
_error "'${key}' not found"
fi

View File

@ -103,6 +103,8 @@ READ_FORM () {
esac
}
TOKEN_PAT=$'^^([][{}\\(\\)^@])|^(~@)|^("(\\\\.|[^\\"])*"?)|^(;[^\n]*)|^([~\'`])|^([^][ ~`\'";{}\\(\\)^@,\n]+)|^(,)|^([[:space:]]+)'
# Returns __reader_tokens as an indexed array of tokens
TOKENIZE () {
local data="${*}"
@ -114,14 +116,14 @@ TOKENIZE () {
local str=
__reader_idx=0
__reader_tokens=
declare -a -g __reader_tokens=() # global array
while true; do
if (( ${#str} < ( chunksz / 2) )) && (( chunk < datalen )); then
str="${str}${data:${chunk}:${chunksz}}"
chunk=$(( chunk + ${chunksz} ))
fi
(( ${#str} == 0 )) && break
[[ "${str}" =~ ^^([][{}\(\)^@])|^(~@)|^(\"(\\.|[^\\\"])*\"?)|^(;[^$'\n']*)|^([~\'\`])|^([^][ ~\`\'\";{}\(\)^@\,$'\n']+)|^(,)|^([[:space:]]+) ]]
[[ "${str}" =~ ${TOKEN_PAT} ]]
token=${BASH_REMATCH[0]}
str="${str:${#token}}"
token="${token}"