Merge branch 'master' into vendored-result-extra

This commit is contained in:
Dillon Kearns 2024-04-25 09:15:51 -07:00
commit c485c89196
24 changed files with 2405 additions and 775 deletions

1
.gitignore vendored
View File

@ -12,3 +12,4 @@ cypress/screenshots
.idea
generated/
elm-review-report.gz.json

File diff suppressed because one or more lines are too long

View File

@ -21,7 +21,7 @@
"BackendTask.File",
"BackendTask.Custom",
"BackendTask.Env",
"Stream",
"BackendTask.Stream",
"BackendTask.Do",
"Server.Request",
"Server.Session",

View File

@ -0,0 +1,42 @@
import { Writable, Transform, Readable } from "node:stream";
// Trivial custom-task handler: ignores its input and its { cwd, env }
// context and always resolves to the fixed greeting "Hello!".
export async function hello(input, { cwd, env }) {
  const greeting = "Hello!";
  return greeting;
}
// Custom duplex stream part: upper-cases every chunk that flows through it.
// Also exposes a metadata thunk so the stream harness can attach metadata.
export async function upperCaseStream() {
  const toUpper = new Transform({
    transform(chunk, _encoding, done) {
      done(null, String(chunk).toUpperCase());
    },
  });
  return {
    metadata: () => "Hi! I'm metadata from upperCaseStream!",
    stream: toUpper,
  };
}
// Custom readable stream part: emits a single fixed greeting, then EOF.
export async function customReadStream() {
  const source = new Readable({
    read() {
      this.push("Hello from customReadStream!");
      this.push(null); // signal end-of-stream after the one chunk
    },
  });
  return source;
}
// Custom writable stream part: forwards everything it receives to
// process.stdout and exposes a metadata thunk for the stream harness.
export async function customWrite(input) {
  return {
    stream: stdout(),
    metadata: () => "Hi! I'm metadata from customWriteStream!",
  };
}

// Wrap process.stdout in a fresh Writable so the pipeline can safely
// end/close the returned stream without touching process.stdout itself.
function stdout() {
  const sink = new Writable({
    write(chunk, _encoding, done) {
      process.stdout.write(chunk, done);
    },
  });
  return sink;
}

View File

@ -35,7 +35,7 @@ run toTest =
Test.Runner.Plain tests ->
case toFailures tests of
[] ->
Script.log "All tests passed!"
Script.log (green " All tests passed!")
failures ->
BackendTask.fail
@ -45,9 +45,9 @@ run toTest =
failures
|> List.map
(\( label, failure ) ->
label ++ " | " ++ failure
"X " ++ label ++ "\n>>>>>> \n " ++ failure ++ "\n<<<<<<\n"
)
|> String.join "\n"
|> String.join "\n\n"
}
)
@ -153,7 +153,7 @@ viewReason : Reason -> String
viewReason reason =
case reason of
Custom ->
""
"Custom"
Equality expected actual ->
"Expected: " ++ expected ++ " | Actual: " ++ actual
@ -220,15 +220,18 @@ viewListDiff expected actual =
viewListDiffPart : Array String -> Int -> String -> String
viewListDiffPart otherList index listPart =
let
green : Bool
green =
isGreen : Bool
isGreen =
Array.get index otherList
|> maybeFilter (\value -> value == listPart)
|> Maybe.map (always True)
|> Maybe.withDefault False
in
-- todo use `green` to set ansi color code for green or red
listPart
if isGreen then
green listPart
else
red listPart
maybeFilter : (a -> Bool) -> Maybe a -> Maybe a
@ -243,3 +246,13 @@ maybeFilter f m =
Nothing ->
Nothing
green : String -> String
green text =
"\u{001B}[32m" ++ text ++ "\u{001B}[0m"
red : String -> String
red text =
"\u{001B}[31m" ++ text ++ "\u{001B}[0m"

View File

View File

@ -0,0 +1,14 @@
-- Demo script that deliberately crashes via `Debug.todo` after a successful
-- log step, so the CLI's error formatting for todos can be exercised.
module Todo exposing (run)

import BackendTask
import Pages.Script as Script exposing (Script)


-- Entry point: logs "Just", then the continuation hits `Debug.todo`,
-- producing the message "Error string from todo." as a runtime failure.
run : Script
run =
    Script.log "Just"
        |> BackendTask.andThen
            (\_ ->
                Debug.todo "Error string from todo."
            )
        |> Script.withoutCliOptions

View File

@ -1,10 +1,10 @@
module StreamDemo exposing (run)
import BackendTask exposing (BackendTask)
import BackendTask.Stream as Stream exposing (Stream)
import FatalError exposing (FatalError)
import Json.Decode as Decode
import Pages.Script as Script exposing (Script)
import Stream exposing (Stream)
run : Script

View File

@ -1,12 +1,18 @@
module StreamTests exposing (run)
import BackendTask exposing (BackendTask)
import BackendTask.Custom
import BackendTask.Http exposing (Error(..))
import BackendTask.Stream as Stream exposing (Stream, defaultCommandOptions)
import BackendTaskTest exposing (testScript)
import Dict
import Expect
import FatalError exposing (FatalError)
import Json.Decode as Decode
import Json.Encode as Encode
import Pages.Internal.FatalError exposing (FatalError(..))
import Pages.Script as Script exposing (Script)
import Stream exposing (Stream)
import TerminalText exposing (fromAnsiString)
import Test
@ -14,21 +20,50 @@ run : Script
run =
testScript "Stream"
[ Stream.fromString "asdf\nqwer\n"
|> Stream.captureCommandWithInput "wc" [ "-l" ]
|> Stream.pipe (Stream.command "wc" [ "-l" ])
|> Stream.read
|> try
|> test "capture stdin"
(\output ->
output.stdout
(\{ body } ->
body
|> String.trim
|> Expect.equal
"2"
)
, Stream.fromString "asdf\nqwer\n"
|> Stream.runCommandWithInput "wc" [ "-l" ]
|> try
|> Stream.pipe (Stream.command "wc" [ "-l" ])
|> Stream.run
|> test "run stdin"
(\() ->
Expect.pass
)
, Stream.command "does-not-exist" []
|> Stream.run
|> expectError "command with error"
"Error: spawn does-not-exist ENOENT"
, BackendTask.Custom.run "hello"
Encode.null
Decode.string
|> try
|> test "custom task"
(Expect.equal "Hello!")
, Stream.fromString "asdf\nqwer\n"
|> Stream.pipe (Stream.customDuplex "upperCaseStream" Encode.null)
|> Stream.read
|> try
|> test "custom duplex"
(.body >> Expect.equal "ASDF\nQWER\n")
, Stream.customRead "customReadStream" Encode.null
|> Stream.read
|> try
|> test "custom read"
(.body >> Expect.equal "Hello from customReadStream!")
, Stream.fromString "qwer\n"
|> Stream.pipe (Stream.customDuplex "customReadStream" Encode.null)
|> Stream.read
|> try
|> expectError "invalid stream"
"Expected 'customReadStream' to be a duplex stream!"
, Stream.fileRead "elm.json"
|> Stream.pipe Stream.gzip
|> Stream.pipe (Stream.fileWrite zipFile)
@ -38,18 +73,21 @@ run =
Stream.fileRead zipFile
|> Stream.pipe Stream.unzip
|> Stream.readJson (Decode.field "type" Decode.string)
|> try
)
|> test "zip and unzip" (Expect.equal "application")
|> test "zip and unzip" (.body >> Expect.equal "application")
, Stream.fromString
"""module Foo
a = 1
b = 2
"""
|> Stream.captureCommandWithInput "elm-format" [ "--stdin" ]
|> Stream.pipe (Stream.command "elm-format" [ "--stdin" ])
|> Stream.read
|> try
|> test "elm-format --stdin"
(\{ stdout } ->
stdout
(\{ metadata, body } ->
body
|> Expect.equal
"""module Foo exposing (a, b)
@ -62,12 +100,123 @@ b =
2
"""
)
, Stream.fileRead "elm.json"
|> Stream.pipe
(Stream.command "jq"
[ """."source-directories"[0]"""
]
)
|> Stream.readJson Decode.string
|> try
|> test "read command output as JSON"
(.body >> Expect.equal "src")
, Stream.fromString "invalid elm module"
|> Stream.pipe
(Stream.commandWithOptions
(defaultCommandOptions
|> Stream.allowNon0Status
|> Stream.withOutput Stream.MergeStderrAndStdout
)
"elm-format"
[ "--stdin" ]
)
|> Stream.read
|> try
|> test "stderr"
(.body >> Expect.equal "Unable to parse file <STDIN>:1:13 To see a detailed explanation, run elm make on the file.\n")
, Stream.http
{ url = "https://jsonplaceholder.typicode.com/posts/124"
, timeoutInMs = Nothing
, body = BackendTask.Http.emptyBody
, retries = Nothing
, headers = []
, method = "GET"
}
|> Stream.read
|> BackendTask.mapError .recoverable
|> BackendTask.toResult
|> test "output from HTTP"
(\result ->
case result of
Ok _ ->
Expect.fail ("Expected a failure, but got success!\n\n" ++ Debug.toString result)
Err (Stream.CustomError (BadStatus meta _) _) ->
meta.statusCode
|> Expect.equal 404
_ ->
Expect.fail ("Unexpected error\n\n" ++ Debug.toString result)
)
, Stream.http
{ url = "https://jsonplaceholder.typicode.com/posts/124"
, timeoutInMs = Nothing
, body = BackendTask.Http.emptyBody
, retries = Nothing
, headers = []
, method = "GET"
}
|> Stream.read
|> try
|> expectError "HTTP FatalError message"
"BadStatus: 404 Not Found"
, Stream.fromString "This is input..."
|> Stream.pipe
(Stream.customTransformWithMeta
"upperCaseStream"
Encode.null
(Decode.string |> Decode.map Ok)
)
|> Stream.read
|> try
|> test "duplex meta"
(Expect.equal
{ metadata = "Hi! I'm metadata from upperCaseStream!"
, body = "THIS IS INPUT..."
}
)
, Stream.fromString "This is input to writeStream!\n"
|> Stream.pipe
(Stream.customWriteWithMeta
"customWrite"
Encode.null
(Decode.string |> Decode.map Ok)
)
|> Stream.readMetadata
|> try
|> test "writeStream meta"
(Expect.equal "Hi! I'm metadata from customWriteStream!")
, Stream.fileRead "does-not-exist"
|> Stream.run
|> expectError "file not found error"
"Error: ENOENT: no such file or directory, open '/Users/dillonkearns/src/github.com/dillonkearns/elm-pages/examples/end-to-end/does-not-exist'"
, Stream.fromString "This is input..."
|> Stream.pipe (Stream.fileWrite "/this/is/invalid.txt")
|> Stream.run
|> expectError "invalid file write destination"
"Error: ENOENT: no such file or directory, mkdir '/this'"
, Stream.gzip
|> Stream.read
|> try
|> BackendTask.do
|> test "gzip alone is no-op"
(\() ->
Expect.pass
)
, Script.exec "does-not-exist-exec" []
|> expectError "exec with non-0 fails"
"Error: spawn does-not-exist-exec ENOENT"
, Script.command "does-not-exist-command" []
|> expectError "command with non-0 fails"
"Error: spawn does-not-exist-command ENOENT"
]
test : String -> (a -> Expect.Expectation) -> BackendTask FatalError a -> BackendTask FatalError Test.Test
test name toExpectation task =
task
--Script.log name
BackendTask.succeed ()
|> Script.doThen task
|> BackendTask.map
(\data ->
Test.test name <|
@ -75,6 +224,33 @@ test name toExpectation task =
)
expectError : String -> String -> BackendTask FatalError a -> BackendTask FatalError Test.Test
expectError name message task =
task
|> BackendTask.toResult
|> BackendTask.map
(\result ->
Test.test name <|
\() ->
case result of
Ok data ->
--Expect.fail "Expected a failure, but got success!"
result
|> Debug.toString
|> Expect.equal "Expected a failure, but got success!"
Err error ->
let
(FatalError info) =
error
in
info.body
|> TerminalText.fromAnsiString
|> TerminalText.toPlainString
|> Expect.equal message
)
try : BackendTask { error | fatal : FatalError } data -> BackendTask FatalError data
try =
BackendTask.allowFatal

View File

@ -13,16 +13,16 @@ import { compatibilityKey } from "./compatibility-key.js";
import * as fs from "node:fs";
import * as crypto from "node:crypto";
import { restoreColorSafe } from "./error-formatter.js";
import { Spinnies } from './spinnies/index.js'
import { Spinnies } from "./spinnies/index.js";
import { default as which } from "which";
import * as readline from "readline";
import { spawn as spawnCallback } from "cross-spawn";
import {ChildProcess} from 'node:child_process';
import * as consumers from 'stream/consumers'
import * as zlib from 'node:zlib'
import { Readable } from "node:stream";
import * as consumers from "stream/consumers";
import * as zlib from "node:zlib";
import { Readable, Writable } from "node:stream";
import * as validateStream from "./validate-stream.js";
import { default as makeFetchHappenOriginal } from "make-fetch-happen";
import mergeStreams from "@sindresorhus/merge-streams";
let verbosity = 2;
const spinnies = new Spinnies();
@ -197,7 +197,8 @@ function runGeneratorAppHelp(
mode,
requestToPerform,
hasFsAccess,
patternsToWatch
patternsToWatch,
portsFile
);
} else {
return runHttpJob(
@ -335,7 +336,8 @@ function runElmApp(
mode,
requestToPerform,
hasFsAccess,
patternsToWatch
patternsToWatch,
portsFile
);
} else {
return runHttpJob(
@ -476,7 +478,8 @@ async function runInternalJob(
mode,
requestToPerform,
hasFsAccess,
patternsToWatch
patternsToWatch,
portsFile
) {
try {
if (requestToPerform.url === "elm-pages-internal://log") {
@ -521,7 +524,7 @@ async function runInternalJob(
} else if (requestToPerform.url === "elm-pages-internal://shell") {
return [requestHash, await runShell(requestToPerform)];
} else if (requestToPerform.url === "elm-pages-internal://stream") {
return [requestHash, await runStream(requestToPerform)];
return [requestHash, await runStream(requestToPerform, portsFile)];
} else if (requestToPerform.url === "elm-pages-internal://start-spinner") {
return [requestHash, runStartSpinner(requestToPerform)];
} else if (requestToPerform.url === "elm-pages-internal://stop-spinner") {
@ -578,160 +581,351 @@ async function runWhich(req) {
async function runQuestion(req) {
return jsonResponse(req, await question(req.body.args[0]));
}
function runStream(req) {
return new Promise(async (resolve, reject) => {
try {
const cwd = path.resolve(...req.dir);
const quiet = req.quiet;
const env = { ...process.env, ...req.env };
const kind = req.body.args[0].kind;
const parts = req.body.args[0].parts;
let lastStream = null;
parts.forEach((part, index) => {
function runStream(req, portsFile) {
return new Promise(async (resolve) => {
let metadataResponse = null;
let lastStream = null;
try {
const cwd = path.resolve(...req.dir);
const quiet = req.quiet;
const env = { ...process.env, ...req.env };
const kind = req.body.args[0].kind;
const parts = req.body.args[0].parts;
let index = 0;
for (const part of parts) {
let isLastProcess = index === parts.length - 1;
let thisStream;
if (isLastProcess && (kind === "command" || kind === "commandCode")) {
const {command, args} = part;
let stdio;
if (kind === "command") {
stdio = ["pipe", "pipe", "pipe"];
} else if (kind === "commandCode") {
stdio = quiet ? ['pipe', 'ignore', 'ignore'] : ['pipe', 'inherit', 'inherit'];
} else {
throw new Error(`Unknown kind: ${kind}`);
}
const newProcess = spawnCallback(command, args, {
stdio,
cwd: cwd,
env: env,
});
lastStream && lastStream.pipe(newProcess.stdin);
if (kind === "command") {
let stdoutOutput = "";
let stderrOutput = "";
let combinedOutput = "";
newProcess.stderr.on("data", function (data) {
stderrOutput += data;
combinedOutput += data;
});
newProcess.stdout.on("data", function (data) {
stdoutOutput += data;
combinedOutput += data;
});
newProcess.on("close", async (exitCode) => {
resolve(jsonResponse(req, { stdoutOutput, stderrOutput, combinedOutput, exitCode }));
});
} else {
newProcess.on("close", async (exitCode) => {
resolve(jsonResponse(req, { exitCode }));
});
}
} else {
thisStream = pipePartToStream(lastStream, part, { cwd, quiet, env });
}
const { stream, metadata } = await pipePartToStream(
lastStream,
part,
{ cwd, quiet, env },
portsFile,
(value) => resolve(jsonResponse(req, value)),
isLastProcess,
kind
);
metadataResponse = metadata;
thisStream = stream;
lastStream = thisStream;
});
index += 1;
}
if (kind === "json") {
resolve(jsonResponse(req, await consumers.json(lastStream)));
resolve(
jsonResponse(req, {
body: await consumers.json(lastStream),
metadata: await tryCallingFunction(metadataResponse),
})
);
} else if (kind === "text") {
resolve(jsonResponse(req, await consumers.text(lastStream)));
resolve(
jsonResponse(req, {
body: await consumers.text(lastStream),
metadata: await tryCallingFunction(metadataResponse),
})
);
} else if (kind === "none") {
// lastStream.once("finish", async () => {
// resolve(jsonResponse(req, null));
// });
lastStream.once("close", () => {
resolve(jsonResponse(req, null));
});
if (!lastStream) {
// ensure all error handling gets a chance to fire before resolving successfully
await tryCallingFunction(metadataResponse);
resolve(jsonResponse(req, { body: null }));
} else {
let resolvedMeta = await tryCallingFunction(metadataResponse);
lastStream.once("finish", async () => {
resolve(
jsonResponse(req, {
body: null,
metadata: resolvedMeta,
})
);
});
lastStream.once("end", async () => {
resolve(
jsonResponse(req, {
body: null,
metadata: resolvedMeta,
})
);
});
}
} else if (kind === "command") {
// already handled in parts.forEach
}
/**
*
* @param {import('node:stream').Stream?} lastStream
* @param {{ name: string }} part
* @param {{cwd: string, quiet: boolean, env: object}} param2
* @returns {Promise<{stream: import('node:stream').Stream, metadata?: any}>}
*/
async function pipePartToStream(
lastStream,
part,
{ cwd, quiet, env },
portsFile,
resolve,
isLastProcess,
kind
) {
if (verbosity > 1 && !quiet) {
}
if (part.name === "stdout") {
return { stream: pipeIfPossible(lastStream, stdout()) };
} else if (part.name === "stderr") {
return { stream: pipeIfPossible(lastStream, stderr()) };
} else if (part.name === "stdin") {
return { stream: process.stdin };
} else if (part.name === "fileRead") {
const newLocal = fs.createReadStream(path.resolve(cwd, part.path));
newLocal.once("error", (error) => {
newLocal.close();
resolve({ error: error.toString() });
});
return { stream: newLocal };
} else if (part.name === "customDuplex") {
const newLocal = await portsFile[part.portName](part.input, {
cwd,
quiet,
env,
});
if (validateStream.isDuplexStream(newLocal.stream)) {
pipeIfPossible(lastStream, newLocal.stream);
return newLocal;
} else {
throw `Expected '${part.portName}' to be a duplex stream!`;
}
} else if (part.name === "customRead") {
return {
metadata: null,
stream: await portsFile[part.portName](part.input, {
cwd,
quiet,
env,
}),
};
} else if (part.name === "customWrite") {
const newLocal = await portsFile[part.portName](part.input, {
cwd,
quiet,
env,
});
if (!validateStream.isWritableStream(newLocal.stream)) {
console.error("Expected a writable stream!");
resolve({ error: "Expected a writable stream!" });
} else {
pipeIfPossible(lastStream, newLocal.stream);
}
return newLocal;
} else if (part.name === "gzip") {
const gzip = zlib.createGzip();
if (!lastStream) {
gzip.end();
}
return {
metadata: null,
stream: pipeIfPossible(lastStream, gzip),
};
} else if (part.name === "unzip") {
return {
metadata: null,
stream: pipeIfPossible(lastStream, zlib.createUnzip()),
};
} else if (part.name === "fileWrite") {
const destinationPath = path.resolve(part.path);
try {
await fsPromises.mkdir(path.dirname(destinationPath), {
recursive: true,
});
} catch (error) {
resolve({ error: error.toString() });
}
const newLocal = fs.createWriteStream(destinationPath);
newLocal.once("error", (error) => {
newLocal.close();
newLocal.removeAllListeners();
resolve({ error: error.toString() });
});
return {
metadata: null,
stream: pipeIfPossible(lastStream, newLocal),
};
} else if (part.name === "httpWrite") {
const makeFetchHappen = makeFetchHappenOriginal.defaults({
// cache: mode === "build" ? "no-cache" : "default",
cache: "default",
});
const response = await makeFetchHappen(part.url, {
body: lastStream,
duplex: "half",
redirect: "follow",
method: part.method,
headers: part.headers,
retry: part.retries,
timeout: part.timeoutInMs,
});
let metadata = () => {
return {
headers: Object.fromEntries(response.headers.entries()),
statusCode: response.status,
// bodyKind,
url: response.url,
statusText: response.statusText,
};
};
return { metadata, stream: response.body };
} else if (part.name === "command") {
const { command, args, allowNon0Status, output } = part;
/** @type {'ignore' | 'inherit'} } */
let letPrint = quiet ? "ignore" : "inherit";
let stderrKind = kind === "none" ? letPrint : "pipe";
if (output === "Ignore") {
stderrKind = "ignore";
} else if (output === "Print") {
stderrKind = letPrint;
}
/**
* @type {import('node:child_process').ChildProcess}
*/
const newProcess = spawnCallback(command, args, {
stdio: [
"pipe",
// if we are capturing stderr instead of stdout, print out stdout with `inherit`
output === "InsteadOfStdout" || kind === "none"
? letPrint
: "pipe",
stderrKind,
],
cwd: cwd,
env: env,
});
// lastStream.once("error", (error) => {
// console.log('Stream error!');
// console.error(error);
// reject(jsonResponse(req, null));
// });
pipeIfPossible(lastStream, newProcess.stdin);
let newStream;
if (output === "MergeWithStdout") {
newStream = mergeStreams([newProcess.stdout, newProcess.stderr]);
} else if (output === "InsteadOfStdout") {
newStream = newProcess.stderr;
} else {
newStream = newProcess.stdout;
}
newProcess.once("error", (error) => {
newStream && newStream.end();
newProcess.removeAllListeners();
resolve({ error: error.toString() });
});
if (isLastProcess) {
return {
stream: newStream,
metadata: new Promise((resoveMeta) => {
newProcess.once("exit", (code) => {
if (code !== 0 && !allowNon0Status) {
newStream && newStream.end();
resolve({
error: `Command ${command} exited with code ${code}`,
});
}
resoveMeta({
exitCode: code,
});
});
}),
};
} else {
return { metadata: null, stream: newStream };
}
} else if (part.name === "fromString") {
return { stream: Readable.from([part.string]), metadata: null };
} else {
// console.error(`Unknown stream part: ${part.name}!`);
// process.exit(1);
throw `Unknown stream part: ${part.name}!`;
}
}
} catch (error) {
console.trace(error);
process.exit(1);
if (lastStream) {
lastStream.destroy();
}
resolve(jsonResponse(req, { error: error.toString() }));
}
});
}
/**
*
* @param {import('node:stream').Stream} lastStream
* @param {{ name: string }} part
* @param {{cwd: string, quiet: boolean, env: object}} param2
* @returns
* @param { import('stream').Stream? } input
* @param {import('stream').Writable | import('stream').Duplex} destination
*/
function pipePartToStream(lastStream, part, { cwd, quiet, env }) {
if (verbosity > 1 && !quiet) {
}
if (part.name === "stdout") {
return lastStream.pipe(process.stdout);
} else if (part.name === "stdin") {
return process.stdin;
} else if (part.name === "fileRead") {
return fs.createReadStream(path.resolve(cwd, part.path));
} else if (part.name === "gzip") {
return lastStream.pipe(zlib.createGzip());
} else if (part.name === "unzip") {
return lastStream.pipe(zlib.createUnzip());
} else if (part.name === "fileWrite") {
return lastStream.pipe(fs.createWriteStream(path.resolve(part.path)));
} else if (part.name === "command") {
const {command, args, allowNon0Status} = part;
/**
* @type {import('node:child_process').ChildProcess}
*/
const newProcess = spawnCallback(command, args, {
stdio: ["pipe", "pipe", "pipe"],
cwd: cwd,
env: env,
});
newProcess.on("error", (error) => {
console.error("ERROR in pipeline!", error);
process.exit(1);
});
newProcess.on("exit", (code) => {
if (code !== 0) {
if (allowNon0Status) {
} else {
console.error("ERROR in exit code!", code);
process.exit(1);
}
}
});
lastStream && lastStream.pipe(newProcess.stdin);
return newProcess.stdout;
} else if (part.name === "fromString") {
return Readable.from([part.string]);
function pipeIfPossible(input, destination) {
if (input) {
return input.pipe(destination);
} else {
console.error(`Unknown stream part: ${part.name}!`);
process.exit(1);
return destination;
}
}
function stdout() {
return new Writable({
write(chunk, encoding, callback) {
process.stdout.write(chunk, callback);
},
});
}
function stderr() {
return new Writable({
write(chunk, encoding, callback) {
process.stderr.write(chunk, callback);
},
});
}
async function tryCallingFunction(func) {
if (func) {
// if is promise
if (func.then) {
return await func;
}
// if is function
else if (typeof func === "function") {
return await func();
}
} else {
return func;
}
}
async function runShell(req) {
const cwd = path.resolve(...req.dir);
const quiet = req.quiet;
const env = { ...process.env, ...req.env };
const captureOutput = req.body.args[0].captureOutput;
if (req.body.args[0].commands.length === 1) {
return jsonResponse(req, await shell({ cwd, quiet, env, captureOutput }, req.body.args[0]));
return jsonResponse(
req,
await shell({ cwd, quiet, env, captureOutput }, req.body.args[0])
);
} else {
return jsonResponse(req, await pipeShells({ cwd, quiet, env, captureOutput }, req.body.args[0]));
return jsonResponse(
req,
await pipeShells({ cwd, quiet, env, captureOutput }, req.body.args[0])
);
}
}
function commandAndArgsToString(cwd, commandsAndArgs) {
return `$ ` + (commandsAndArgs.commands.map((commandAndArgs) => {
return [ commandAndArgs.command, ...commandAndArgs.args ].join(" ");
}).join(" | "));
return (
`$ ` +
commandsAndArgs.commands
.map((commandAndArgs) => {
return [commandAndArgs.command, ...commandAndArgs.args].join(" ");
})
.join(" | ")
);
}
export function shell({ cwd, quiet, env, captureOutput }, commandAndArgs) {
@ -743,40 +937,52 @@ export function shell({ cwd, quiet, env, captureOutput }, commandAndArgs) {
}
if (!captureOutput && !quiet) {
const subprocess = spawnCallback(command, args, {
stdio: quiet ? ['inherit', 'ignore', 'ignore'] : ['inherit', 'inherit', 'inherit'],
stdio: quiet
? ["inherit", "ignore", "ignore"]
: ["inherit", "inherit", "inherit"],
cwd: cwd,
env: env,
});
subprocess.on("close", async (code) => {
resolve({ output: "", errorCode: code, stderrOutput: "", stdoutOutput: "" });
});
subprocess.on("close", async (code) => {
resolve({
output: "",
errorCode: code,
stderrOutput: "",
stdoutOutput: "",
});
});
} else {
const subprocess = spawnCallback(command, args, {
stdio: ["pipe", "pipe", "pipe"],
cwd: cwd,
env: env,
});
let commandOutput = "";
let stderrOutput = "";
let stdoutOutput = "";
const subprocess = spawnCallback(command, args, {
stdio: ["pipe", "pipe", "pipe"],
cwd: cwd,
env: env,
});
let commandOutput = "";
let stderrOutput = "";
let stdoutOutput = "";
if (verbosity > 0 && !quiet) {
subprocess.stdout.pipe(process.stdout);
subprocess.stderr.pipe(process.stderr);
if (verbosity > 0 && !quiet) {
subprocess.stdout.pipe(process.stdout);
subprocess.stderr.pipe(process.stderr);
}
subprocess.stderr.on("data", function (data) {
commandOutput += data;
stderrOutput += data;
});
subprocess.stdout.on("data", function (data) {
commandOutput += data;
stdoutOutput += data;
});
subprocess.on("close", async (code) => {
resolve({
output: commandOutput,
errorCode: code,
stderrOutput,
stdoutOutput,
});
});
}
subprocess.stderr.on("data", function (data) {
commandOutput += data;
stderrOutput += data;
});
subprocess.stdout.on("data", function (data) {
commandOutput += data;
stdoutOutput += data;
});
subprocess.on("close", async (code) => {
resolve({ output: commandOutput, errorCode: code, stderrOutput, stdoutOutput });
});
}
});
}
@ -787,92 +993,103 @@ export function shell({ cwd, quiet, env, captureOutput }, commandAndArgs) {
/**
* @param {{ commands: ElmCommand[] }} commandsAndArgs
*/
export function pipeShells({ cwd, quiet, env, captureOutput }, commandsAndArgs) {
export function pipeShells(
{ cwd, quiet, env, captureOutput },
commandsAndArgs
) {
return new Promise((resolve, reject) => {
if (verbosity > 1 && !quiet) {
console.log(commandAndArgsToString(cwd, commandsAndArgs));
}
/**
* @type {null | import('node:child_process').ChildProcess}
*/
let previousProcess = null;
let currentProcess = null;
/**
* @type {null | import('node:child_process').ChildProcess}
*/
let previousProcess = null;
let currentProcess = null;
commandsAndArgs.commands.forEach(({command, args, timeout }, index) => {
let isLastProcess = index === commandsAndArgs.commands.length - 1;
commandsAndArgs.commands.forEach(({ command, args, timeout }, index) => {
let isLastProcess = index === commandsAndArgs.commands.length - 1;
/**
* @type {import('node:child_process').ChildProcess}
*/
if (previousProcess === null) {
if (previousProcess === null) {
currentProcess = spawnCallback(command, args, {
stdio: ["inherit", "pipe", "inherit"],
timeout: timeout ? undefined : timeout,
cwd: cwd,
env: env,
});
} else {
if (isLastProcess && !captureOutput && false) {
currentProcess = spawnCallback(command, args, {
stdio: ['inherit', 'pipe', 'inherit'],
stdio: quiet
? ["pipe", "ignore", "ignore"]
: ["pipe", "inherit", "inherit"],
timeout: timeout ? undefined : timeout,
cwd: cwd,
env: env,
});
} else {
if (isLastProcess && !captureOutput) {
currentProcess = spawnCallback(command, args, {
stdio: quiet ? ['pipe', 'ignore', 'ignore'] : ['pipe', 'inherit', 'inherit'],
timeout: timeout ? undefined : timeout,
cwd: cwd,
env: env,
});
} else {
currentProcess = spawnCallback(command, args, {
stdio: ['pipe', 'pipe', 'pipe'],
timeout: timeout ? undefined : timeout,
cwd: cwd,
env: env,
});
}
previousProcess.stdout.pipe(currentProcess.stdin);
currentProcess = spawnCallback(command, args, {
stdio: ["pipe", "pipe", "pipe"],
timeout: timeout ? undefined : timeout,
cwd: cwd,
env: env,
});
}
previousProcess = currentProcess;
previousProcess.stdout.pipe(currentProcess.stdin);
}
previousProcess = currentProcess;
});
if (currentProcess === null) { reject('') }
else {
let commandOutput = "";
let stderrOutput = "";
let stdoutOutput = "";
if (currentProcess === null) {
reject("");
} else {
let commandOutput = "";
let stderrOutput = "";
let stdoutOutput = "";
if (verbosity > 0 && !quiet) {
currentProcess.stdout && currentProcess.stdout.pipe(process.stdout);
currentProcess.stderr && currentProcess.stderr.pipe(process.stderr);
}
if (verbosity > 0 && !quiet) {
currentProcess.stdout && currentProcess.stdout.pipe(process.stdout);
currentProcess.stderr && currentProcess.stderr.pipe(process.stderr);
}
currentProcess.stderr && currentProcess.stderr.on("data", function (data) {
commandOutput += data;
stderrOutput += data;
});
currentProcess.stdout && currentProcess.stdout.on("data", function (data) {
commandOutput += data;
stdoutOutput += data;
});
currentProcess.stderr &&
currentProcess.stderr.on("data", function (data) {
commandOutput += data;
stderrOutput += data;
});
currentProcess.stdout &&
currentProcess.stdout.on("data", function (data) {
commandOutput += data;
stdoutOutput += data;
});
currentProcess.on("close", async (code) => {
resolve({ output: commandOutput, errorCode: code, stderrOutput, stdoutOutput });
resolve({
output: commandOutput,
errorCode: code,
stderrOutput,
stdoutOutput,
});
});
}
});
}
export async function question({ prompt }) {
return new Promise((resolve) =>
{
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
return new Promise((resolve) => {
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
return rl.question(prompt, (answer) => {
rl.close();
resolve(answer);
});
},
);
return rl.question(prompt, (answer) => {
rl.close();
resolve(answer);
});
});
}
async function runWriteFileJob(req) {
@ -899,11 +1116,11 @@ function runStartSpinner(req) {
if (data.spinnerId) {
spinnerId = data.spinnerId;
// TODO use updateSpinnerState?
spinnies.update(spinnerId, { text: data.text, status: 'spinning' });
spinnies.update(spinnerId, { text: data.text, status: "spinning" });
} else {
spinnerId = Math.random().toString(36);
// spinnies.add(spinnerId, { text: data.text, status: data.immediateStart ? 'spinning' : 'stopped' });
spinnies.add(spinnerId, { text: data.text, status: 'spinning' });
// spinnies.add(spinnerId, { text: data.text, status: data.immediateStart ? 'spinning' : 'stopped' });
spinnies.add(spinnerId, { text: data.text, status: "spinning" });
// }
}
return jsonResponse(req, spinnerId);
@ -913,29 +1130,34 @@ function runStopSpinner(req) {
const data = req.body.args[0];
const { spinnerId, completionText, completionFn } = data;
let completeFn;
if (completionFn === 'succeed') {
spinnies.succeed(spinnerId, { text: completionText })
} else if (completionFn === 'fail') {
spinnies.fail(spinnerId, { text: completionText })
if (completionFn === "succeed") {
spinnies.succeed(spinnerId, { text: completionText });
} else if (completionFn === "fail") {
spinnies.fail(spinnerId, { text: completionText });
} else {
console.log('Unexpected')
console.log("Unexpected");
}
return jsonResponse(req, null);
}
async function runGlobNew(req, patternsToWatch) {
try {
const { pattern, options } = req.body.args[0];
const cwd = path.resolve(...req.dir);
const matchedPaths = await globby.globby(pattern, { ...options, stats: true, cwd });
const matchedPaths = await globby.globby(pattern, {
...options,
stats: true,
cwd,
});
patternsToWatch.add(pattern);
return jsonResponse(
req,
matchedPaths.map((fullPath) => {
const stats = fullPath.stats;
if (!stats) { return null }
if (!stats) {
return null;
}
return {
fullPath: fullPath.path,
captures: mm.capture(pattern, fullPath.path),
@ -947,7 +1169,7 @@ async function runGlobNew(req, patternsToWatch) {
birthtime: Math.round(stats.birthtime.getTime()),
fullPath: fullPath.path,
isDirectory: stats.isDirectory(),
}
},
};
})
);

View File

@ -92,12 +92,13 @@ export function lookupOrPerform(
}),
});
} else {
console.time(`BackendTask.Custom.run "${portName}"`);
!rawRequest.quiet &&
console.time(`BackendTask.Custom.run "${portName}"`);
let context = {
cwd: path.resolve(...rawRequest.dir),
quiet: rawRequest.quiet,
env: { ...process.env, ...rawRequest.env },
}
};
try {
resolve({
kind: "response-json",
@ -134,7 +135,8 @@ export function lookupOrPerform(
});
}
}
console.timeEnd(`BackendTask.Custom.run "${portName}"`);
!rawRequest.quiet &&
console.timeEnd(`BackendTask.Custom.run "${portName}"`);
}
} catch (error) {
console.trace(error);
@ -145,7 +147,7 @@ export function lookupOrPerform(
}
} else {
try {
console.time(`fetch ${request.url}`);
!rawRequest.quiet && console.time(`fetch ${request.url}`);
const response = await safeFetch(makeFetchHappen, request.url, {
method: request.method,
body: request.body,
@ -156,7 +158,7 @@ export function lookupOrPerform(
...rawRequest.cacheOptions,
});
console.timeEnd(`fetch ${request.url}`);
!rawRequest.quiet && console.timeEnd(`fetch ${request.url}`);
const expectString = request.headers["elm-pages-internal"];
let body;

View File

@ -0,0 +1,25 @@
// source: https://www.30secondsofcode.org/js/s/typecheck-nodejs-streams/
// Duck-typing check for a Node.js readable stream: a non-null object
// carrying `pipe` plus the private read machinery (`_read`,
// `_readableState`) that Node attaches to readable instances.
export function isReadableStream(val) {
  if (val === null || typeof val !== "object") {
    return false;
  }
  const looksLikeStream = typeof val.pipe === "function";
  const hasReadMachinery =
    typeof val._read === "function" && typeof val._readableState === "object";
  return looksLikeStream && hasReadMachinery;
}
// Duck-typing check for a Node.js writable stream, mirroring
// isReadableStream but keyed on the private write machinery
// (`_write`, `_writableState`).
export function isWritableStream(val) {
  if (val === null || typeof val !== "object") {
    return false;
  }
  return (
    typeof val.pipe === "function" &&
    typeof val._write === "function" &&
    typeof val._writableState === "object"
  );
}
// Returns true when `val` passes both the readable and writable checks
// above (e.g. Node's Duplex and Transform streams).
export function isDuplexStream(val) {
  return isReadableStream(val) && isWritableStream(val);
}

18
package-lock.json generated
View File

@ -9,6 +9,7 @@
"version": "3.0.12",
"license": "BSD-3-Clause",
"dependencies": {
"@sindresorhus/merge-streams": "^3.0.0",
"busboy": "^1.6.0",
"chokidar": "^3.5.3",
"cli-cursor": "^4.0.0",
@ -982,9 +983,9 @@
}
},
"node_modules/@sindresorhus/merge-streams": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-1.0.0.tgz",
"integrity": "sha512-rUV5WyJrJLoloD4NDN1V1+LDMDWOa4OTsT4yYJwQNpTU6FWxkxHpL7eu4w+DmiH8x/EAM1otkPE1+LaspIbplw==",
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-3.0.0.tgz",
"integrity": "sha512-5Muw0TDzXvK/i0BmrL1tiTsb6Sh/DXe/e5d63GpmHWr59t7rUyQhhiIuw605q/yvJxyBf6gMWmsxCC2fqtcFvQ==",
"engines": {
"node": ">=18"
},
@ -4225,6 +4226,17 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/globby/node_modules/@sindresorhus/merge-streams": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-1.0.0.tgz",
"integrity": "sha512-rUV5WyJrJLoloD4NDN1V1+LDMDWOa4OTsT4yYJwQNpTU6FWxkxHpL7eu4w+DmiH8x/EAM1otkPE1+LaspIbplw==",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/got": {
"version": "11.8.6",
"resolved": "https://registry.npmjs.org/got/-/got-11.8.6.tgz",

View File

@ -25,6 +25,7 @@
"author": "Dillon Kearns",
"license": "BSD-3-Clause",
"dependencies": {
"@sindresorhus/merge-streams": "^3.0.0",
"busboy": "^1.6.0",
"chokidar": "^3.5.3",
"cli-cursor": "^4.0.0",

View File

@ -5,8 +5,8 @@ module BackendTask exposing
, andThen, resolve, combine
, andMap
, map2, map3, map4, map5, map6, map7, map8, map9
, allowFatal, mapError, onError, toResult, failIf
, do, doEach, sequence
, allowFatal, mapError, onError, toResult
, do, doEach, sequence, failIf
, inDir, quiet, withEnv
)
@ -84,12 +84,12 @@ Any place in your `elm-pages` app where the framework lets you pass in a value o
## FatalError Handling
@docs allowFatal, mapError, onError, toResult, failIf
@docs allowFatal, mapError, onError, toResult
## Scripting
@docs do, doEach, sequence
@docs do, doEach, sequence, failIf
## BackendTask Context
@ -154,7 +154,7 @@ and is relevant for the following types of `BackendTask`s:
- Reading files ([`BackendTask.File`](BackendTask-File))
- Running glob patterns ([`BackendTask.Glob`](BackendTask-Glob))
- Executing shell commands ([`BackendTask.Shell`](BackendTask-Shell))
- Executing shell commands ([`BackendTask.Stream.command`](BackendTask-Stream#command)) and [`Pages.Script.sh`](Pages-Script#command)
See the BackendTask Context section for more about how setting context works.
@ -229,7 +229,12 @@ inDir dir backendTask =
(\a b -> lookupFn a b |> inDir dir)
{-| -}
{-| Sets the verbosity level to `quiet` in the context of the given `BackendTask` (including all nested `BackendTask`s and continuations within it).
This will turn off performance timing logs. It will also prevent shell commands from printing their output to the console when they are run
(see [`BackendTask.Stream.command`](BackendTask-Stream#command)).
-}
quiet : BackendTask error value -> BackendTask error value
quiet backendTask =
-- elm-review: known-unoptimized-recursion
@ -336,7 +341,11 @@ combineHelp items =
List.foldl (map2 (::)) (succeed []) items |> map List.reverse
{-| -}
{-| Perform a List of `BackendTask`s with no output, one-by-one sequentially.
Same as [`sequence`](#sequence), except it ignores the resulting value of each `BackendTask`.
-}
doEach : List (BackendTask error ()) -> BackendTask error ()
doEach items =
items
@ -352,7 +361,12 @@ do backendTask =
|> map (\_ -> ())
{-| -}
{-| Perform a List of `BackendTask`s one-by-one sequentially. [`combine`](#combine) will perform them all in parallel, which is
typically a better default when you aren't sure which you want.
Same as [`doEach`](#doEach), except it ignores the resulting value of each `BackendTask`.
-}
sequence : List (BackendTask error value) -> BackendTask error (List value)
sequence items =
items
@ -721,7 +735,8 @@ toResult backendTask =
|> onError (Err >> succeed)
{-| -}
{-| If the condition is true, fail with the given `FatalError`. Otherwise, succeed with `()`.
-}
failIf : Bool -> FatalError -> BackendTask FatalError ()
failIf condition fatalError =
if condition then

View File

@ -1,5 +1,6 @@
module BackendTask.Do exposing
( do
, allowFatal
, noop
, exec, command
, glob, log, env
@ -25,6 +26,7 @@ apply continuation-style formatting to your Elm code:
You can see more discussion of continuation style in Elm in this Discourse post: <https://discourse.elm-lang.org/t/experimental-json-decoding-api/2121>.
@docs do
@docs allowFatal
## Defining Your Own Continuation Utilities
@ -69,39 +71,96 @@ import FatalError exposing (FatalError)
import Pages.Script as Script
{-| -}
{-| A do-style helper for [`Script.log`](Pages-Script#log).
example : BackendTask FatalError ()
example =
log "Starting script..." <|
\() ->
-- ...
log "Done!" <|
\() ->
noop
-}
log : String -> (() -> BackendTask error b) -> BackendTask error b
log string then_ =
do (Script.log string) then_
{-| Use any `BackendTask` into a continuation-style task.
example : BackendTask FatalError ()
example =
do
(Script.question "What is your name? ")
<|
\name ->
\() ->
Script.log ("Hello " ++ name ++ "!")
-}
do : BackendTask error a -> (a -> BackendTask error b) -> BackendTask error b
do fn requestInfo =
BackendTask.andThen requestInfo fn
{-| -}
{-| A `BackendTask` that does nothing. Defined as `BackendTask.succeed ()`.
It's a useful shorthand for when you want to end a continuation chain.
example : BackendTask FatalError ()
example =
exec "ls" [ "-l" ] <|
\() ->
log "Hello, world!" <|
\() ->
noop
-}
noop : BackendTask error ()
noop =
BackendTask.succeed ()
{-| Same as [`do`](#do), but with a shorthand to call `BackendTask.allowFatal` on it.
import BackendTask exposing (BackendTask)
import FatalError exposing (FatalError)
import BackendTask.File as BackendTask.File
import BackendTask.Do exposing (allowFatal, do)
example : BackendTask FatalError ()
example =
do (BackendTask.File.rawFile "post-1.md" |> BackendTask.allowFatal) <|
\post1 ->
allowFatal (BackendTask.File.rawFile "post-2.md") <|
\post2 ->
Script.log (post1 ++ "\n\n" ++ post2)
-}
allowFatal : BackendTask { error | fatal : FatalError } data -> (data -> BackendTask FatalError b) -> BackendTask FatalError b
allowFatal =
do << BackendTask.allowFatal
{-| A continuation-style helper for [`Glob.fromString`](BackendTask-Glob#fromString).
In a shell script, you can think of this as a stand-in for globbing files directly within a command. All commands in
you run with [`BackendTask.Shell`](BackendTask-Shell) (including the [`sh`](#exec) and [`exec`](#command) helpers)
sanitizes and escapes all arguments passed, and does not do glob expansion, so this is helpful for translating
In a shell script, you can think of this as a stand-in for globbing files directly within a command. The [`BackendTask.Stream.command`](BackendTask-Stream#command)
which lets you run shell commands sanitizes and escapes all arguments passed, and does not do glob expansion, so this is helpful for translating
a shell script to Elm.
This example passes a list of matching file paths along to an `rm -f` command.
example : BackendTask FatalError ()
example =
glob "src/**/*.elm" <| \elmFiles ->
log ("You have " ++ String.fromInt (List.length elmFiles) ++ " Elm files") <| \() ->
noop
glob "src/**/*.elm" <|
\elmFiles ->
log ("Going to delete " ++ String.fromInt (List.length elmFiles) ++ " Elm files") <|
\() ->
exec "rm" ("-f" :: elmFiles) <|
\() ->
noop
-}
glob : String -> (List String -> BackendTask FatalError a) -> BackendTask FatalError a
@ -113,14 +172,16 @@ glob pattern =
checkCompilationInDir : String -> BackendTask FatalError ()
checkCompilationInDir dir =
glob (dir ++ "/**/*.elm") <| \elmFiles ->
each elmFiles
(\elmFile ->
Shell.sh "elm" [ "make", elmFile, "--output", "/dev/null" ]
|> BackendTask.quiet
)
<| \_ ->
noop
glob (dir ++ "/**/*.elm") <|
\elmFiles ->
each elmFiles
(\elmFile ->
Shell.sh "elm" [ "make", elmFile, "--output", "/dev/null" ]
|> BackendTask.quiet
)
<|
\_ ->
noop
-}
each : List a -> (a -> BackendTask error b) -> (List b -> BackendTask error c) -> BackendTask error c
@ -134,26 +195,39 @@ each list fn then_ =
then_
{-| -}
{-| A do-style helper for [`BackendTask.failIf`](BackendTask#failIf).
-}
failIf : Bool -> FatalError -> (() -> BackendTask FatalError b) -> BackendTask FatalError b
failIf condition error =
do <| BackendTask.failIf condition error
{-| -}
{-| A do-style helper for [`Script.exec`](Pages-Script#exec).
-}
exec : String -> List String -> (() -> BackendTask FatalError b) -> BackendTask FatalError b
exec command_ args_ =
do <| Script.exec command_ args_
{-| -}
{-| A do-style helper for [`Script.command`](Pages-Script#command).
-}
command : String -> List String -> (String -> BackendTask FatalError b) -> BackendTask FatalError b
command command_ args_ function =
Script.command command_ args_
|> BackendTask.andThen function
{-| -}
{-| A do-style helper for [`Env.expect`](BackendTask-Env#expect).
example : BackendTask FatalError ()
example =
env "API_KEY" <|
\apiKey ->
allowFatal (apiRequest apiKey) <|
\() ->
noop
-}
env : String -> (String -> BackendTask FatalError b) -> BackendTask FatalError b
env name then_ =
do (Env.expect name |> BackendTask.allowFatal) <| then_

View File

@ -641,6 +641,24 @@ toResultThing ( expect, body, maybeResponse ) =
Err (BadBody Nothing "Unexpected combination, internal error")
{-| -}
type alias Metadata =
{ url : String
, statusCode : Int
, statusText : String
, headers : Dict String String
}
{-| -}
type Error
= BadUrl String
| Timeout
| NetworkError
| BadStatus Metadata String
| BadBody (Maybe Json.Decode.Error) String
errorToString : Error -> { title : String, body : String }
errorToString error =
{ title = "HTTP Error"
@ -658,8 +676,10 @@ errorToString error =
[ TerminalText.text "NetworkError"
]
BadStatus _ string ->
[ TerminalText.text ("BadStatus: " ++ string)
BadStatus metadata _ ->
[ TerminalText.text "BadStatus: "
, TerminalText.red (String.fromInt metadata.statusCode)
, TerminalText.text (" " ++ metadata.statusText)
]
BadBody _ string ->
@ -668,21 +688,3 @@ errorToString error =
)
|> TerminalText.toString
}
{-| -}
type alias Metadata =
{ url : String
, statusCode : Int
, statusText : String
, headers : Dict String String
}
{-| -}
type Error
= BadUrl String
| Timeout
| NetworkError
| BadStatus Metadata String
| BadBody (Maybe Json.Decode.Error) String

1179
src/BackendTask/Stream.elm Normal file

File diff suppressed because it is too large Load Diff

View File

@ -502,8 +502,17 @@ update config appMsg model =
ProcessFetchResponse transitionId response toMsg ->
case response of
Ok ( _, ResponseSketch.Redirect redirectTo ) ->
( model, NoEffect )
|> startNewGetLoad (currentUrlWithPath redirectTo model) toMsg
let
isAbsoluteUrl : Bool
isAbsoluteUrl =
Url.fromString redirectTo /= Nothing
in
if isAbsoluteUrl then
( model, BrowserLoadUrl redirectTo )
else
( model, NoEffect )
|> startNewGetLoad (currentUrlWithPath redirectTo model) toMsg
_ ->
update config (toMsg response) (clearLoadingFetchersAfterDataLoad transitionId model)

View File

@ -43,13 +43,13 @@ Read more about using the `elm-pages` CLI to run (or bundle) scripts, plus a bri
import BackendTask exposing (BackendTask)
import BackendTask.Http
import BackendTask.Internal.Request
import BackendTask.Stream as Stream exposing (defaultCommandOptions)
import Cli.OptionsParser as OptionsParser
import Cli.Program as Program
import FatalError exposing (FatalError)
import Json.Decode as Decode
import Json.Encode as Encode
import Pages.Internal.Script
import Stream
{-| The type for your `run` function that can be executed by `elm-pages run`.
@ -175,7 +175,23 @@ withCliOptions config execute =
)
{-| -}
{-| Sleep for a number of milliseconds.
module MyScript exposing (run)
import BackendTask
import Pages.Script as Script
run =
Script.withoutCliOptions
(Script.log "Hello..."
|> Script.doThen
(Script.sleep 1000)
|> Script.doThen
(Script.log "World!")
)
-}
sleep : Int -> BackendTask error ()
sleep int =
BackendTask.Internal.Request.request
@ -191,14 +207,29 @@ sleep int =
}
{-| -}
{-| Run a command with no output, then run another command.
module MyScript exposing (run)
import BackendTask
import Pages.Script as Script
run =
Script.withoutCliOptions
(Script.log "Hello!"
|> Script.doThen
(Script.log "World!")
)
-}
doThen : BackendTask error value -> BackendTask error () -> BackendTask error value
doThen task1 task2 =
task2
|> BackendTask.andThen (\() -> task1)
{-| -}
{-| Same as [`expectWhich`](#expectWhich), but returns `Nothing` if the command is not found instead of failing with a [`FatalError`](FatalError).
-}
which : String -> BackendTask error (Maybe String)
which command_ =
BackendTask.Internal.Request.request
@ -208,7 +239,33 @@ which command_ =
}
{-| -}
{-| Check if a command is available on the system. If it is, return the full path to the command, otherwise fail with a [`FatalError`](FatalError).
module MyScript exposing (run)
import BackendTask
import Pages.Script as Script
run : Script
run =
Script.withoutCliOptions
(Script.expectWhich "elm-review"
|> BackendTask.andThen
(\path ->
Script.log ("The path to `elm-review` is: " ++ path)
)
)
If you run it with a command that is not available, you will see an error like this:
Script.expectWhich "hype-script"
```shell
-- COMMAND NOT FOUND ---------------
I expected to find `hype-script`, but it was not on your PATH. Make sure it is installed and included in your PATH.
```
-}
expectWhich : String -> BackendTask FatalError String
expectWhich command_ =
which command_
@ -228,7 +285,23 @@ expectWhich command_ =
)
{-| -}
{-|
module QuestionDemo exposing (run)
import BackendTask
run : Script
run =
Script.withoutCliOptions
(Script.question "What is your name? "
|> BackendTask.andThen
(\name ->
Script.log ("Hello, " ++ name ++ "!")
)
)
-}
question : String -> BackendTask error String
question prompt =
BackendTask.Internal.Request.request
@ -240,16 +313,51 @@ question prompt =
}
{-| -}
{-| Like [`command`](#command), but prints stderr and stdout to the console as the command runs instead of capturing them.
module MyScript exposing (run)
import BackendTask
import Pages.Script as Script exposing (Script)
run : Script
run =
Script.withoutCliOptions
(Script.exec "ls" [])
-}
exec : String -> List String -> BackendTask FatalError ()
exec command_ args_ =
Stream.runCommand command_ args_
|> BackendTask.allowFatal
Stream.command command_ args_
|> Stream.run
{-| -}
{-| Run a single command and return stderr and stdout combined as a single String.
If you want to do more advanced things like piping together multiple commands in a pipeline, or piping in a file to a command, etc., see the [`Stream`](BackendTask-Stream) module.
module MyScript exposing (run)
import BackendTask
import Pages.Script as Script exposing (Script)
run : Script
run =
Script.withoutCliOptions
(Script.command "ls" []
|> BackendTask.andThen
(\files ->
Script.log ("Files: " ++ files)
)
)
-}
command : String -> List String -> BackendTask FatalError String
command command_ args_ =
Stream.captureCommand command_ args_
Stream.commandWithOptions
(defaultCommandOptions |> Stream.withOutput Stream.MergeStderrAndStdout)
command_
args_
|> Stream.read
|> BackendTask.map .body
|> BackendTask.allowFatal
|> BackendTask.map .combined

View File

@ -1,8 +1,64 @@
module Pages.Script.Spinner exposing (CompletionIcon(..), Options, Spinner, Steps(..), options, runSteps, runTask, runTaskExisting, runTaskWithOptions, showStep, spinner, start, steps, withImmediateStart, withNamedAnimation, withOnCompletion, withStep, withStepWithOptions)
module Pages.Script.Spinner exposing
( Steps(..), steps, withStep
, withStepWithOptions
, runSteps
, Options, options
, CompletionIcon(..)
, withOnCompletion
, runTask, runTaskWithOptions
, showStep, runSpinnerWithTask, Spinner
)
{-|
@docs CompletionIcon, Options, Spinner, Steps, options, runSteps, runTask, runTaskExisting, runTaskWithOptions, showStep, spinner, start, steps, withImmediateStart, withNamedAnimation, withOnCompletion, withStep, withStepWithOptions
## Running Steps
The easiest way to use spinners is to define a series of [`Steps`](#Steps) and then run them with [`runSteps`](#runSteps).
Steps are a sequential series of `BackendTask`s that are run one after the other. If a step fails (has a [`FatalError`](FatalError)),
its spinner will show a failure, and the remaining steps will not be run and will be displayed as cancelled (the step name in gray).
module StepsDemo exposing (run)
import BackendTask exposing (BackendTask)
import Pages.Script as Script exposing (Script)
import Pages.Script.Spinner as Spinner
run : Script
run =
Script.withoutCliOptions
(Spinner.steps
|> Spinner.withStep "Compile Main.elm" (\() -> Script.exec "elm" [ "make", "src/Main.elm", "--output=/dev/null" ])
|> Spinner.withStep "Verify formatting" (\() -> Script.exec "elm-format" [ "--validate", "src/" ])
|> Spinner.withStep "elm-review" (\() -> Script.exec "elm-review" [])
|> Spinner.runSteps
)
@docs Steps, steps, withStep
@docs withStepWithOptions
@docs runSteps
## Configuring Steps
@docs Options, options
@docs CompletionIcon
@docs withOnCompletion
## Running with BackendTask
@docs runTask, runTaskWithOptions
## Low-Level
@docs showStep, runSpinnerWithTask, Spinner
-}
@ -14,16 +70,17 @@ import Json.Decode as Decode
import Json.Encode as Encode
{-| -}
{-| An icon used to indicate the completion status of a step. Set by using [`withOnCompletion`](#withOnCompletion).
-}
type CompletionIcon
= Succeed
| Fail
| Warn
| Info
| Custom String
{-| -}
{-| Configuration that can be used with [`runTaskWithOptions`](#runTaskWithOptions) and [`withStepWithOptions`](#withStepWithOptions).
-}
type Options error value
= Options
{ text : String
@ -33,7 +90,25 @@ type Options error value
}
{-| -}
{-| Set the completion icon and text based on the result of the task.
import Pages.Script.Spinner as Spinner
example =
Spinner.options "Fetching data"
|> Spinner.withOnCompletion
(\result ->
case result of
Ok _ ->
( Spinner.Succeed, "Fetched data!" )
Err _ ->
( Spinner.Fail
, Just "Could not fetch data."
)
)
-}
withOnCompletion : (Result error value -> ( CompletionIcon, Maybe String )) -> Options error value -> Options error value
withOnCompletion function (Options options_) =
Options { options_ | onCompletion = function }
@ -44,7 +119,14 @@ type Spinner error value
= Spinner String (Options error value)
{-| -}
{-| The default options for a spinner. The spinner `text` is a required argument and will be displayed as the step name.
import Pages.Script.Spinner as Spinner
example =
Spinner.options "Compile Main.elm"
-}
options : String -> Options error value
options text =
Options
@ -62,13 +144,49 @@ options text =
}
{-| -}
withNamedAnimation : String -> Options error value -> Options error value
withNamedAnimation animationName (Options options_) =
Options { options_ | animation = Just animationName }
--{-| -}
--withNamedAnimation : String -> Options error value -> Options error value
--withNamedAnimation animationName (Options options_) =
-- Options { options_ | animation = Just animationName }
{-| A low-level helper for showing a step and getting back a `Spinner` reference which you can later use to `start` the spinner.
{-| `showStep` gives you a `Spinner` reference which you can use to start the spinner later with `runSpinnerWithTask`.
Most use cases can be achieved more easily using more high-level helpers, like [`runTask`](#runTask) or [`steps`](#steps).
`showStep` can be useful if you have more dynamic steps that you want to reveal over time.
module ShowStepDemo exposing (run)
import BackendTask exposing (BackendTask)
import Pages.Script as Script exposing (Script, doThen, sleep)
import Pages.Script.Spinner as Spinner
run : Script
run =
Script.withoutCliOptions
(BackendTask.succeed
(\spinner1 spinner2 spinner3 ->
sleep 3000
|> Spinner.runSpinnerWithTask spinner1
|> doThen
(sleep 3000
|> Spinner.runSpinnerWithTask spinner2
|> doThen
(sleep 3000
|> Spinner.runSpinnerWithTask spinner3
)
)
)
|> BackendTask.andMap
(Spinner.options "Step 1" |> Spinner.showStep)
|> BackendTask.andMap
(Spinner.options "Step 2" |> Spinner.showStep)
|> BackendTask.andMap
(Spinner.options "Step 3" |> Spinner.showStep)
|> BackendTask.andThen identity
)
-}
showStep : Options error value -> BackendTask error (Spinner error value)
showStep (Options options_) =
@ -92,33 +210,18 @@ showStep (Options options_) =
}
{-| -}
start : Spinner error1 value1 -> BackendTask error ()
start (Spinner spinnerId _) =
BackendTask.Internal.Request.request
{ name = "start-spinner"
, body =
BackendTask.Http.jsonBody
([ ( "spinnerId", Encode.string spinnerId )
]
|> Encode.object
)
, expect =
BackendTask.Http.expectJson (Decode.succeed ())
}
{-| -}
withImmediateStart : Options error value -> Options error value
withImmediateStart (Options options_) =
Options { options_ | immediateStart = True }
--{-| -}
--withImmediateStart : Options error value -> Options error value
--withImmediateStart (Options options_) =
-- Options { options_ | immediateStart = True }
{-| -}
runTaskWithOptions : Options error value -> BackendTask error value -> BackendTask error value
runTaskWithOptions (Options options_) backendTask =
Options options_
|> withImmediateStart
--|> withImmediateStart
|> showStep
|> BackendTask.andThen
(\(Spinner spinnerId _) ->
@ -165,7 +268,32 @@ runTaskWithOptions (Options options_) backendTask =
)
{-| -}
{-| Run a `BackendTask` with a spinner. The spinner will show a success icon if the task succeeds, and a failure icon if the task fails.
It's often easier to use [`steps`](#steps) when possible.
module SequentialSteps exposing (run)
import Pages.Script as Script exposing (Script, doThen, sleep)
import Pages.Script.Spinner as Spinner
run : Script
run =
Script.withoutCliOptions
(sleep 3000
|> Spinner.runTask "Step 1..."
|> doThen
(sleep 3000
|> Spinner.runTask "Step 2..."
|> doThen
(sleep 3000
|> Spinner.runTask "Step 3..."
)
)
)
-}
runTask : String -> BackendTask error value -> BackendTask error value
runTask text backendTask =
spinner text
@ -175,14 +303,16 @@ runTask text backendTask =
( Succeed, Nothing )
Err _ ->
( Fail, Just "Uh oh! Failed to fetch" )
( Fail, Nothing )
)
backendTask
{-| -}
runTaskExisting : Spinner error value -> BackendTask error value -> BackendTask error value
runTaskExisting (Spinner spinnerId (Options options_)) backendTask =
{-| After calling `showStep` to get a reference to a `Spinner`, use `runSpinnerWithTask` to run a `BackendTask` and show a failure or success
completion status once it is done.
-}
runSpinnerWithTask : Spinner error value -> BackendTask error value -> BackendTask error value
runSpinnerWithTask (Spinner spinnerId (Options options_)) backendTask =
BackendTask.Internal.Request.request
{ name = "start-spinner"
, body =
@ -190,7 +320,7 @@ runTaskExisting (Spinner spinnerId (Options options_)) backendTask =
(Encode.object
[ ( "text", Encode.string options_.text )
, ( "spinnerId", Encode.string spinnerId )
, ( "immediateStart", Encode.bool True )
, ( "immediateStart", Encode.bool options_.immediateStart )
, ( "spinner", Encode.string "line" )
]
)
@ -316,22 +446,22 @@ encodeCompletionIcon completionIcon =
Info ->
"info"
Custom _ ->
"custom"
{-| -}
{-| The definition of a series of `BackendTask`s to run, with a spinner for each step.
-}
type Steps error value
= Steps (BackendTask error value)
{-| -}
{-| Initialize an empty series of `Steps`.
-}
steps : Steps FatalError ()
steps =
Steps (BackendTask.succeed ())
{-| -}
{-| Add a `Step`. See [`withStepWithOptions`](#withStepWithOptions) to configure the step's spinner.
-}
withStep : String -> (oldValue -> BackendTask FatalError newValue) -> Steps FatalError oldValue -> Steps FatalError newValue
withStep text backendTask steps_ =
case steps_ of
@ -339,7 +469,7 @@ withStep text backendTask steps_ =
Steps
(BackendTask.map2
(\pipelineValue newSpinner ->
runTaskExisting
runSpinnerWithTask
newSpinner
(backendTask pipelineValue)
)
@ -349,7 +479,8 @@ withStep text backendTask steps_ =
)
{-| -}
{-| Add a step with custom [`Options`](#Options).
-}
withStepWithOptions : Options FatalError newValue -> (oldValue -> BackendTask FatalError newValue) -> Steps FatalError oldValue -> Steps FatalError newValue
withStepWithOptions options_ backendTask steps_ =
case steps_ of
@ -357,7 +488,7 @@ withStepWithOptions options_ backendTask steps_ =
Steps
(BackendTask.map2
(\pipelineValue newSpinner ->
runTaskExisting
runSpinnerWithTask
newSpinner
(backendTask pipelineValue)
)
@ -367,7 +498,8 @@ withStepWithOptions options_ backendTask steps_ =
)
{-| -}
{-| Perform the `Steps` in sequence.
-}
runSteps : Steps FatalError value -> BackendTask FatalError value
runSteps (Steps steps_) =
steps_

View File

@ -1,4 +1,4 @@
module RequestsAndPending exposing (HttpError(..), RawResponse, RequestsAndPending, Response(..), ResponseBody(..), bodyEncoder, get)
module RequestsAndPending exposing (HttpError(..), RawResponse, RequestsAndPending, Response(..), ResponseBody(..), bodyEncoder, get, responseDecoder)
import Base64
import Bytes exposing (Bytes)

View File

@ -1,405 +0,0 @@
module Stream exposing
( Stream, command, fileRead, fileWrite, fromString, httpRead, httpWrite, pipe, read, run, stdin, stdout, gzip, readJson, unzip
, CommandOutput, captureCommandWithInput, runCommandWithInput
, captureCommand, runCommand
, commandWithOptions
, CommandOptions, defaultCommandOptions, allowNon0Status, inheritUnused, withOutput, withTimeout
, OutputChannel(..)
)
{-|
@docs Stream, command, fileRead, fileWrite, fromString, httpRead, httpWrite, pipe, read, run, stdin, stdout, gzip, readJson, unzip
@docs CommandOutput, captureCommandWithInput, runCommandWithInput
## Running Commands
@docs captureCommand, runCommand
## Command Options
@docs commandWithOptions
@docs CommandOptions, defaultCommandOptions, allowNon0Status, inheritUnused, withOutput, withTimeout
@docs OutputChannel
-}
import BackendTask exposing (BackendTask)
import BackendTask.Http exposing (Body)
import BackendTask.Internal.Request
import Bytes exposing (Bytes)
import FatalError exposing (FatalError)
import Json.Decode as Decode exposing (Decoder)
import Json.Encode as Encode
{-| A stream pipeline, phantom-typed by whether it can be read from
and/or written to (`read`/`write` fields of `kind`). Internally just an
ordered list of named parts that is serialized to JSON for the JS runtime.
-}
type Stream kind
    = Stream (List StreamPart)


-- One named stage in a pipeline, with its JSON-encoded attributes.
type StreamPart
    = StreamPart String (List ( String, Encode.Value ))
-- Internal helper: build a one-part Stream from a part name and its
-- JSON-encoded attributes.
single : String -> List ( String, Encode.Value ) -> Stream kind
single inner1 inner2 =
    Stream [ StreamPart inner1 inner2 ]
{-| A readable `Stream` backed by the process's standard input.
-}
stdin : Stream { read : (), write : Never }
stdin =
    single "stdin" []
{-| A writable `Stream` backed by the process's standard output.
-}
stdout : Stream { read : Never, write : () }
stdout =
    single "stdout" []
{-| A readable `Stream` of the contents of the file at the given path.
-}
fileRead : String -> Stream { read : (), write : Never }
fileRead path =
    single "fileRead" [ ( "path", Encode.string path ) ]
{-| A writable `Stream` that writes whatever is piped into it to the
file at the given path.
-}
fileWrite : String -> Stream { read : Never, write : () }
fileWrite path =
    single "fileWrite" [ ( "path", Encode.string path ) ]
{-| A duplex `Stream` stage named "gzip" — presumably compresses the
piped data on the JS side (NOTE(review): behavior lives in the runtime;
confirm against the JS implementation).
-}
gzip : Stream { read : (), write : () }
gzip =
    single "gzip" []
{-| A duplex `Stream` stage named "unzip" — presumably decompresses the
piped data on the JS side (NOTE(review): behavior lives in the runtime;
confirm against the JS implementation).
-}
unzip : Stream { read : (), write : () }
unzip =
    single "unzip" []
{-| A readable `Stream` intended to carry an HTTP response body.

NOTE(review): the configuration record is currently ignored — the body
encodes `single "httpRead" []` with no url/method/headers/body, so none
of the request details reach the JS runtime. Verify before relying on it.
-}
httpRead :
    { url : String
    , method : String
    , headers : List ( String, String )
    , body : Body
    , retries : Maybe Int
    , timeoutInMs : Maybe Int
    }
    -> Stream { read : (), write : Never }
httpRead string =
    single "httpRead" []
{-| A writable `Stream` intended to send piped data as an HTTP request body.

NOTE(review): like `httpRead`, the configuration record is ignored —
`single "httpWrite" []` encodes none of the request details. Verify
before relying on it.
-}
httpWrite :
    { url : String
    , method : String
    , headers : List ( String, String )
    , retries : Maybe Int
    , timeoutInMs : Maybe Int
    }
    -> Stream { read : Never, write : () }
httpWrite string =
    single "httpWrite" []
{-| Connect two streams: the parts of the second argument (`from`) run
before the parts of the first (`to`). Designed for pipeline style, so
`fromString "hi" |> pipe gzip` feeds the string into gzip.
-}
pipe :
    Stream { read : toReadable, write : toWriteable }
    -> Stream { read : (), write : fromWriteable }
    -> Stream { read : toReadable, write : toWriteable }
pipe (Stream to) (Stream from) =
    Stream (from ++ to)
{-| Execute a writable pipeline for its side effects, discarding any
output (encoded as kind "none" for the JS runtime).
-}
run : Stream { read : read, write : () } -> BackendTask FatalError ()
run stream =
    BackendTask.Internal.Request.request
        { name = "stream"
        , body = BackendTask.Http.jsonBody (pipelineEncoder stream "none")
        , expect = BackendTask.Http.expectJson (Decode.succeed ())
        }
-- Serialize a pipeline for the JS runtime: a "kind" tag (how the result
-- is consumed: "none", "text", "json", "command", …) plus the ordered
-- list of parts, each an object with its "name" and attributes.
pipelineEncoder : Stream a -> String -> Encode.Value
pipelineEncoder (Stream parts) kind =
    Encode.object
        [ ( "kind", Encode.string kind )
        , ( "parts"
          , Encode.list
                (\(StreamPart name data) ->
                    Encode.object (( "name", Encode.string name ) :: data)
                )
                parts
          )
        ]
{-| A readable `Stream` whose contents are exactly the given `String`.
-}
fromString : String -> Stream { read : (), write : Never }
fromString string =
    single "fromString" [ ( "string", Encode.string string ) ]
{-| Run the pipeline and capture its final output as a `String`
(encoded as kind "text" for the JS runtime).
-}
read : Stream { read : (), write : write } -> BackendTask FatalError String
read stream =
    BackendTask.Internal.Request.request
        { name = "stream"
        , body = BackendTask.Http.jsonBody (pipelineEncoder stream "text")
        , expect = BackendTask.Http.expectJson Decode.string
        }
{-| Run the pipeline and decode its final output as JSON with the given
`Decoder` (encoded as kind "json" for the JS runtime).
-}
readJson : Decoder value -> Stream { read : (), write : write } -> BackendTask FatalError value
readJson decoder stream =
    BackendTask.Internal.Request.request
        { name = "stream"
        , body = BackendTask.Http.jsonBody (pipelineEncoder stream "json")
        , expect = BackendTask.Http.expectJson decoder
        }
{-| Not implemented: always fails with a `FatalError`. Kept as a
placeholder for reading a pipeline's output as raw `Bytes`.
-}
readBytes : Stream { read : (), write : write } -> BackendTask FatalError Bytes
readBytes stream =
    BackendTask.fail (FatalError.fromString "Not implemented")
{-| A duplex `Stream` that runs a shell command with
`defaultCommandOptions`. Piped input becomes the command's stdin;
its output feeds the next stage.
-}
command : String -> List String -> Stream { read : read, write : write }
command command_ args_ =
    commandWithOptions defaultCommandOptions command_ args_
{-| Like `command`, but with explicit `CommandOptions` (output channel,
non-zero exit handling, timeout) encoded alongside the command and args.
-}
commandWithOptions : CommandOptions -> String -> List String -> Stream { read : read, write : write }
commandWithOptions (CommandOptions options) command_ args_ =
    single "command"
        [ ( "command", Encode.string command_ )
        , ( "args", Encode.list Encode.string args_ )
        , ( "allowNon0Status", Encode.bool options.allowNon0Status )
        , ( "output", encodeChannel options.output )
        , ( "timeoutInMs", nullable Encode.int options.timeoutInMs )
        ]
-- Encode a Maybe by applying the encoder to the value, or emitting
-- JSON null when absent.
nullable : (a -> Encode.Value) -> Maybe a -> Encode.Value
nullable encoder maybeValue =
    maybeValue
        |> Maybe.map encoder
        |> Maybe.withDefault Encode.null
{-| Which of a command's output channels the pipeline captures:
stdout only, stderr only, or both.
-}
type OutputChannel
    = Stdout
    | Stderr
    | Both
{-| Opaque configuration for running a command. Build with
`defaultCommandOptions` and refine with the `with*` helpers.
-}
type CommandOptions
    = CommandOptions CommandOptions_


-- Internal record backing the opaque CommandOptions type.
type alias CommandOptions_ =
    { output : OutputChannel
    , inheritUnused : Bool
    , allowNon0Status : Bool
    , timeoutInMs : Maybe Int
    }
{-| The defaults: capture `Stdout` only, `inheritUnused = False`,
`allowNon0Status = False`, and no timeout.
-}
defaultCommandOptions : CommandOptions
defaultCommandOptions =
    CommandOptions
        { output = Stdout
        , inheritUnused = False
        , allowNon0Status = False
        , timeoutInMs = Nothing
        }
{-| Choose which output channel(s) the command's stages capture.
-}
withOutput : OutputChannel -> CommandOptions -> CommandOptions
withOutput output (CommandOptions cmd) =
    CommandOptions { cmd | output = output }
{-| Set `allowNon0Status = True`, so a non-zero exit code is tolerated
rather than treated as a failure by the runtime.
-}
allowNon0Status : CommandOptions -> CommandOptions
allowNon0Status (CommandOptions cmd) =
    CommandOptions { cmd | allowNon0Status = True }
{-| Set a command timeout, in milliseconds.
-}
withTimeout : Int -> CommandOptions -> CommandOptions
withTimeout timeoutMs (CommandOptions cmd) =
    CommandOptions { cmd | timeoutInMs = Just timeoutMs }
{-| Set `inheritUnused = True`. NOTE(review): the semantics live on the
JS side — presumably unused stdio channels are inherited from the parent
process rather than captured; confirm against the runtime.
-}
inheritUnused : CommandOptions -> CommandOptions
inheritUnused (CommandOptions cmd) =
    CommandOptions { cmd | inheritUnused = True }
-- JSON encoding for OutputChannel, matching the strings the JS runtime
-- expects ("stdout" / "stderr" / "both").
encodeChannel : OutputChannel -> Encode.Value
encodeChannel output =
    case output of
        Stdout ->
            Encode.string "stdout"

        Stderr ->
            Encode.string "stderr"

        Both ->
            Encode.string "both"
{-| Everything captured from a finished command: stdout and stderr
separately, the two interleaved (`combined`), and the exit code.
-}
type alias CommandOutput =
    { stdout : String
    , stderr : String
    , combined : String
    , exitCode : Int
    }
-- Decode the runner's command result into a `CommandOutput`. Note that the
-- JSON field names ("stdoutOutput", "stderrOutput", "combinedOutput") differ
-- from the record field names ("stdout", "stderr", "combined").
commandOutputDecoder : Decoder CommandOutput
commandOutputDecoder =
    Decode.map4 CommandOutput
        (Decode.field "stdoutOutput" Decode.string)
        (Decode.field "stderrOutput" Decode.string)
        (Decode.field "combinedOutput" Decode.string)
        (Decode.field "exitCode" Decode.int)
{-| Run a command, piping the given stream into it as input, and capture its
output. On recoverable failure the error carries the exit code and the
captured `CommandOutput`.
-}
captureCommandWithInput :
    String
    -> List String
    -> Stream { read : (), write : write }
    -> BackendTask { fatal : FatalError, recoverable : { code : Int, output : CommandOutput } } CommandOutput
captureCommandWithInput command_ args_ stream =
    captureCommand_ command_ args_ (Just stream)
{-| Internal helper shared by `captureCommand` and `captureCommandWithInput`.

Builds a "command" pipeline — piping the given stream into the command when
one is provided — sends it as a "stream" request, and decodes the captured
output with `commandOutputDecoder`.

-}
captureCommand_ :
    String
    -> List String
    -> Maybe (Stream { read : (), write : write })
    -> BackendTask { fatal : FatalError, recoverable : { code : Int, output : CommandOutput } } CommandOutput
captureCommand_ command_ args_ maybeStream =
    BackendTask.Internal.Request.request
        { name = "stream"
        , body =
            BackendTask.Http.jsonBody
                (pipelineEncoder
                    (case maybeStream of
                        Just stream ->
                            -- Connect the provided stream to the command's input.
                            stream
                                |> pipe (command command_ args_)

                        Nothing ->
                            command command_ args_
                    )
                    "command"
                )
        , expect = BackendTask.Http.expectJson commandOutputDecoder
        }
{-| Run a command, piping the given stream into it as input. Fails with a
recoverable error carrying the exit code when the command exits non-zero
(see `runCommand_`).
-}
runCommandWithInput :
    String
    -> List String
    -> Stream { read : (), write : write }
    -> BackendTask { fatal : FatalError, recoverable : Int } ()
runCommandWithInput command_ args_ inputStream =
    -- Renamed the misleadingly-named `maybeStream` parameter: the value here
    -- is a plain `Stream`, not a `Maybe` — it is only wrapped in `Just` when
    -- handed to `runCommand_`.
    runCommand_ command_ args_ (Just inputStream)
{-| Internal helper shared by `runCommand` and `runCommandWithInput`.

Builds a "commandCode" pipeline — piping the given stream into the command
when one is provided — decodes the resulting exit code, and turns a non-zero
exit code into a recoverable error carrying that code.

-}
runCommand_ :
    String
    -> List String
    -> Maybe (Stream { read : (), write : write })
    -> BackendTask { fatal : FatalError, recoverable : Int } ()
runCommand_ command_ args_ maybeStream =
    BackendTask.Internal.Request.request
        { name = "stream"
        , body =
            BackendTask.Http.jsonBody
                (pipelineEncoder
                    (case maybeStream of
                        Just stream ->
                            -- Connect the provided stream to the command's input.
                            stream
                                |> pipe (command command_ args_)

                        Nothing ->
                            command command_ args_
                    )
                    "commandCode"
                )
        , expect = BackendTask.Http.expectJson (Decode.field "exitCode" Decode.int)
        }
        |> BackendTask.andThen
            (\exitCode ->
                if exitCode == 0 then
                    BackendTask.succeed ()

                else
                    -- Surface a non-zero exit code as a recoverable error so
                    -- callers can inspect the code if they choose to recover.
                    BackendTask.fail
                        (FatalError.recoverable
                            { title = "Command Failed"
                            , body = "Command `" ++ commandToString command_ args_ ++ "` failed with exit code " ++ String.fromInt exitCode
                            }
                            exitCode
                        )
            )
{-| Run a command (with no input stream) and capture its output. On
recoverable failure the error carries the exit code and the captured
`CommandOutput`.
-}
captureCommand :
    String
    -> List String
    -> BackendTask { fatal : FatalError, recoverable : { code : Int, output : CommandOutput } } CommandOutput
captureCommand command_ args_ =
    captureCommand_ command_ args_ Nothing
{-| Run a command (with no input stream), discarding its output. Fails with a
recoverable error carrying the exit code when the command exits non-zero
(see `runCommand_`).
-}
runCommand :
    String
    -> List String
    -> BackendTask { fatal : FatalError, recoverable : Int } ()
runCommand command_ args_ =
    runCommand_ command_ args_ Nothing
-- Render a command and its arguments as a single shell-like string, used in
-- error messages. Joining `command_ :: args_` in one pass avoids the trailing
-- space the previous implementation produced when `args_` was empty.
commandToString : String -> List String -> String
commandToString command_ args_ =
    String.join " " (command_ :: args_)

View File

@ -11,6 +11,7 @@ module TerminalText exposing
, red
, resetColors
, text
, toPlainString
, toString
, toString_
, yellow
@ -109,6 +110,13 @@ toString_ (Style ansiStyle innerText) =
]
{-| Concatenate the raw text of every segment, discarding all styling. -}
toPlainString : List Text -> String
toPlainString segments =
    String.concat (List.map (\(Style _ rawText) -> rawText) segments)
{-| Parse a string that may contain ANSI escape sequences into styled `Text`
segments.
-}
fromAnsiString : String -> List Text
fromAnsiString ansiString =
    -- Folds over the parsed input with the sibling `parseInto` step function,
    -- starting from ( blankStyle, [] ).
    -- NOTE(review): how the final `List Text` is extracted from the
    -- accumulator depends on `Ansi.parseInto` — confirm against that package.
    Ansi.parseInto ( blankStyle, [] ) parseInto ansiString