Original commit: 0ff9e3f62b
Wojciech Daniło 2021-03-05 16:22:19 +01:00 committed by GitHub
parent 3a04557ac5
commit 87a19bec3d
14 changed files with 536 additions and 692 deletions


@ -60,6 +60,7 @@ jobs:
      run: >-
        if [[ ${{ steps.checkCurrentReleaseTag.outputs.exists }} == true ]];
        then exit 1; fi
+     if: github.base_ref == 'unstable' || github.base_ref == 'stable'
    - name: Get list of changed files
      id: changed_files
      run: |2-
@ -107,6 +108,8 @@ jobs:
      run: npm install --save-dev --save-exact prettier
    - name: Install Clippy
      run: rustup component add clippy
+   - name: Lint Markdown sources
+     run: npx prettier --check '*.md'
    - name: Lint JavaScript sources
      run: npx prettier --check 'src/**/*.js'
    - name: Lint Rust sources
@ -161,7 +164,7 @@ jobs:
          mv $WASMPACKDIR/wasm-pack ~/.cargo/bin
          rm -r $WASMPACKDIR
      shell: bash
-     if: matrix.os == 'macOS-latest'
+     if: startsWith(matrix.os,'macOS')
    - name: Install wasm-pack (Windows)
      env:
        WASMPACKURL: https://github.com/rustwasm/wasm-pack/releases/download/v0.9.1
@ -172,7 +175,7 @@ jobs:
          mv $WASMPACKDIR/wasm-pack ~/.cargo/bin
          rm -r $WASMPACKDIR
      shell: bash
-     if: matrix.os == 'windows-latest'
+     if: startsWith(matrix.os,'windows')
    - name: Install wasm-pack (Linux)
      env:
        WASMPACKURL: https://github.com/rustwasm/wasm-pack/releases/download/v0.9.1
@ -183,7 +186,7 @@ jobs:
          mv $WASMPACKDIR/wasm-pack ~/.cargo/bin
          rm -r $WASMPACKDIR
      shell: bash
-     if: matrix.os == 'ubuntu-latest'
+     if: startsWith(matrix.os,'ubuntu')
    - name: Run tests (WASM)
      run: node ./run test --no-native --skip-version-validation
  simple_build:
@ -215,7 +218,7 @@ jobs:
          mv $WASMPACKDIR/wasm-pack ~/.cargo/bin
          rm -r $WASMPACKDIR
      shell: bash
-     if: matrix.os == 'macOS-latest'
+     if: startsWith(matrix.os,'macOS')
    - name: Install wasm-pack (Windows)
      env:
        WASMPACKURL: https://github.com/rustwasm/wasm-pack/releases/download/v0.9.1
@ -226,7 +229,7 @@ jobs:
          mv $WASMPACKDIR/wasm-pack ~/.cargo/bin
          rm -r $WASMPACKDIR
      shell: bash
-     if: matrix.os == 'windows-latest'
+     if: startsWith(matrix.os,'windows')
    - name: Install wasm-pack (Linux)
      env:
        WASMPACKURL: https://github.com/rustwasm/wasm-pack/releases/download/v0.9.1
@ -237,10 +240,10 @@ jobs:
          mv $WASMPACKDIR/wasm-pack ~/.cargo/bin
          rm -r $WASMPACKDIR
      shell: bash
-     if: matrix.os == 'ubuntu-latest'
+     if: startsWith(matrix.os,'ubuntu')
    - name: Build (macos)
      run: node ./run dist --skip-version-validation --target macos
-     if: matrix.os == 'macos-latest'
+     if: startsWith(matrix.os,'macos')
    if: >-
      !(contains(github.event.head_commit.message,'[ci build]') ||
      github.base_ref == 'develop' || github.base_ref == 'unstable' ||
@ -286,7 +289,7 @@ jobs:
          mv $WASMPACKDIR/wasm-pack ~/.cargo/bin
          rm -r $WASMPACKDIR
      shell: bash
-     if: matrix.os == 'macOS-latest'
+     if: startsWith(matrix.os,'macOS')
    - name: Install wasm-pack (Windows)
      env:
        WASMPACKURL: https://github.com/rustwasm/wasm-pack/releases/download/v0.9.1
@ -297,7 +300,7 @@ jobs:
          mv $WASMPACKDIR/wasm-pack ~/.cargo/bin
          rm -r $WASMPACKDIR
      shell: bash
-     if: matrix.os == 'windows-latest'
+     if: startsWith(matrix.os,'windows')
    - name: Install wasm-pack (Linux)
      env:
        WASMPACKURL: https://github.com/rustwasm/wasm-pack/releases/download/v0.9.1
@ -308,29 +311,29 @@ jobs:
          mv $WASMPACKDIR/wasm-pack ~/.cargo/bin
          rm -r $WASMPACKDIR
      shell: bash
-     if: matrix.os == 'ubuntu-latest'
+     if: startsWith(matrix.os,'ubuntu')
    - name: Build (macos)
      run: node ./run dist --skip-version-validation --target macos
-     if: matrix.os == 'macos-latest'
+     if: startsWith(matrix.os,'macos')
    - name: Build (win)
      run: node ./run dist --skip-version-validation --target win
-     if: matrix.os == 'windows-latest'
+     if: startsWith(matrix.os,'windows')
    - name: Build (linux)
      run: node ./run dist --skip-version-validation --target linux
-     if: matrix.os == 'ubuntu-latest'
+     if: startsWith(matrix.os,'ubuntu')
    - name: Upload Content Artifacts
      uses: actions/upload-artifact@v1
      with:
        name: content
        path: dist/content
-     if: matrix.os == 'macOS-latest'
+     if: startsWith(matrix.os,'macOS')
    - name: Upload Artifacts (macOS, dmg)
      uses: actions/upload-artifact@v1
      with:
        name: enso-mac-${{fromJson(steps.changelog.outputs.content).version}}.dmg
        path: >-
          dist/client/enso-mac-${{fromJson(steps.changelog.outputs.content).version}}.dmg
-     if: matrix.os == 'macos-latest'
+     if: startsWith(matrix.os,'macos')
    - name: Upload Artifacts (macOS, dmg.sha256)
      uses: actions/upload-artifact@v1
      with:
@ -338,14 +341,14 @@ jobs:
          enso-mac-${{fromJson(steps.changelog.outputs.content).version}}.dmg.sha256
        path: >-
          dist/client/enso-mac-${{fromJson(steps.changelog.outputs.content).version}}.dmg.sha256
-     if: matrix.os == 'macos-latest'
+     if: startsWith(matrix.os,'macos')
    - name: Upload Artifacts (Windows, exe)
      uses: actions/upload-artifact@v1
      with:
        name: enso-win-${{fromJson(steps.changelog.outputs.content).version}}.exe
        path: >-
          dist/client/enso-win-${{fromJson(steps.changelog.outputs.content).version}}.exe
-     if: matrix.os == 'windows-latest'
+     if: startsWith(matrix.os,'windows')
    - name: Upload Artifacts (Windows, exe.sha256)
      uses: actions/upload-artifact@v1
      with:
@ -353,7 +356,7 @@ jobs:
          enso-win-${{fromJson(steps.changelog.outputs.content).version}}.exe.sha256
        path: >-
          dist/client/enso-win-${{fromJson(steps.changelog.outputs.content).version}}.exe.sha256
-     if: matrix.os == 'windows-latest'
+     if: startsWith(matrix.os,'windows')
    - name: Upload Artifacts (Linux, AppImage)
      uses: actions/upload-artifact@v1
      with:
@ -361,7 +364,7 @@ jobs:
          enso-linux-${{fromJson(steps.changelog.outputs.content).version}}.AppImage
        path: >-
          dist/client/enso-linux-${{fromJson(steps.changelog.outputs.content).version}}.AppImage
-     if: matrix.os == 'ubuntu-latest'
+     if: startsWith(matrix.os,'ubuntu')
    - name: Upload Artifacts (Linux, AppImage.sha256)
      uses: actions/upload-artifact@v1
      with:
@ -369,7 +372,7 @@ jobs:
          enso-linux-${{fromJson(steps.changelog.outputs.content).version}}.AppImage.sha256
        path: >-
          dist/client/enso-linux-${{fromJson(steps.changelog.outputs.content).version}}.AppImage.sha256
-     if: matrix.os == 'ubuntu-latest'
+     if: startsWith(matrix.os,'ubuntu')
    if: >-
      contains(github.event.head_commit.message,'[ci build]') || github.base_ref
      == 'develop' || github.base_ref == 'unstable' || github.base_ref ==
@ -408,6 +411,11 @@ jobs:
      run: >-
        if [[ ${{ steps.checkCurrentReleaseTag.outputs.exists }} == true ]];
        then exit 1; fi
+     if: github.base_ref == 'unstable' || github.base_ref == 'stable'
+   - name: Install Prettier
+     run: npm install --save-dev --save-exact prettier
+   - name: Pretty print changelog.
+     run: npx prettier --prose-wrap never CHANGELOG.md --write
    - name: Upload GitHub Release
      uses: softprops/action-gh-release@v1
      env:
@ -431,7 +439,7 @@ jobs:
    strategy:
      matrix:
        os:
-         - ubuntu-latest
+         - ubuntu-18.04
      fail-fast: false
    steps:
      - uses: actions/checkout@v1
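Note that the workflow now selects per-OS steps with `startsWith(matrix.os, ...)` rather than exact `matrix.os == '<os>-latest'` comparisons, so the conditions keep matching when a matrix entry pins a concrete image such as `ubuntu-18.04`. A minimal illustrative sketch (the job and step names below are hypothetical, not part of this workflow):

```yaml
jobs:
  example:
    strategy:
      matrix:
        os: [macOS-latest, windows-latest, ubuntu-18.04]
    runs-on: ${{ matrix.os }}
    steps:
      # Matches both 'ubuntu-latest' and 'ubuntu-18.04'.
      - name: Linux-only step
        if: startsWith(matrix.os, 'ubuntu')
        run: echo "running on a Linux runner"
```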


@ -1,4 +0,0 @@
trailingComma = "es5"
tabWidth = 4
semi = false
singleQuote = true

gui/.prettierrc.yaml Normal file

@ -0,0 +1,15 @@
overrides:
  - files: "*.js"
    options:
      printWidth: 100
      tabWidth: 4
      semi: false
      singleQuote: true
      trailingComma: "es5"
      arrowParens: "avoid"
  - files: "*.md"
    options:
      printWidth: 80
      proseWrap: "always"


@ -2,18 +2,19 @@
This is a release focused on bug-fixing, stability, and performance. It improves
the performance of workflows and visualizations, and improves the look and feel
of the graphical interface. In addition, the graphical interface now informs the
users about errors and where they originate.

<br/>![New Learning Resources](/docs/assets/tags/new_learning_resources.svg)

- [Learn how to define custom data visualizations in
  Enso][podcast-custom-visualizations].
- [Learn how to use Java libraries in Enso, to build a
  webserver][podcast-java-interop].
- [Learn how to use Javascript libraries in Enso, to build custom server-side
  website rendering][podcast-http-server].
- [Discover why Enso Compiler is so fast and how it was built to support a
  dual-representation language][podcast-compiler-internals].
- [Learn more about the vision behind Enso and about its planned
  future][podcast-future-of-enso].
@ -24,23 +25,22 @@ users now about errors and where they originate.
- [Errors in workflows are now displayed in the graphical interface][1215].
  Previously, these errors were silently skipped, which was non-intuitive and
  hard to understand. Now, the IDE displays both dataflow errors and panics in a
  nice and descriptive fashion.
- [Added geographic map support for Tables (data frames).][1187] Tables that
  have `latitude`, `longitude`, and optionally `label` columns can now be shown
  as points on a map.
- [Added a shortcut for live reloading of visualization files.][1190] This
  drastically improves how quickly new visualizations can be tested during their
  development. This is _currently_ limited in that, after reloading
  visualization definitions, the currently visible visualizations must be
  switched to another and switched back to refresh their content. See the [video
  podcast about building custom visualizations][podcast-custom-visualizations]
  to learn more.
- [Added a visual indicator of the ongoing standard library compilation][1264].
  Currently, each time the IDE is started, the backend needs to compile the
  standard library before it can provide the IDE with type information and
  values. Because of that, not all functionalities are ready to work directly
  after starting the IDE. Now, there is a visible indication of the ongoing
  background process.
- [Added the ability to reposition visualisations.][1096] There is now an icon
  in the visualization action bar that allows dragging the visualization away
  from a node. Once the visualization has been moved, another icon appears that
@ -56,7 +56,7 @@ users now about errors and where they originate.
  note, that large tables will get truncated to 2000 entries. This limitation
  will be lifted in future releases.
- [Performance improvements during visual workflow][1067]. Nodes added with the
  searcher will have their values automatically assigned to newly generated
  variables, which allows the Enso Engine to cache intermediate values and hence
  improve visualization performance.
- [Minor documentation rendering fixes][1098]. Fixed cases where text would be
@ -66,7 +66,7 @@ users now about errors and where they originate.
  is now better at dealing with incompatible metadata in files, which stores
  node visual position information, the history of chosen searcher suggestions,
  etc. This will allow the IDE to correctly open projects that were created
  using a different IDE version and prevent unnecessary loss of metadata.
- Pressing and holding up and down arrow keys make the list view selection move
  continuously.
- The shortcuts to close the application and to toggle the developer tools at
@ -78,14 +78,14 @@ users now about errors and where they originate.
- [Fixed visual glitch where a node's text was displayed as white on a white
  background][1264]. Most notably this occurred with the output node of a
  function generated using the node collapse refactoring.
- Many visual glitches were fixed, including small "pixel-like" artifacts
  appearing on the screen.
- [Several parser improvements][1274]. The parser used in the IDE has been
  updated to the latest version. This resolves several issues with language
  constructs like `import`, lambdas, and parentheses, where, upon typing certain
  text, the edit could be automatically reverted.
- [The auto-import functionality was improved][1279]. Libraries' `Main` modules
  are omitted in expressions inserted by the searcher. For example, the `point`
  method of the `Geo` library will be displayed as `Geo.point` and will insert
  import `Geo` instead of `Geo.Main`.
- Cursors in text editors behave correctly now (they are not affected by scene
@ -94,7 +94,7 @@ users now about errors and where they originate.
#### EnsoGL (rendering engine)

- A new multi-camera management system, allowing the same shape systems to be
  rendered on different layers from different cameras. The implementation
  automatically caches the same shape system definitions per scene layer in
  order to minimize the amount of WebGL draw calls and hence improve


@ -21,7 +21,6 @@
<br/>
</p>

### Overview

<p>
@ -52,11 +51,13 @@ University and NASA as one of 20 most unique technologies worldwide. Enso
consists of several sub-projects, including the
[Enso Language Compiler](https://github.com/enso-org/enso), the
[Enso Integrated Development Environment (IDE)](https://github.com/enso-org/ide),
and
[a high performance WebGL UI framework (EnsoGL)](https://github.com/enso-org/ide/tree/main/src/rust/ensogl).

<br/>

### Getting Started

Enso is distributed both in the form of
[pre-built packages for MacOS, Windows, or Linux](https://github.com/enso-org/ide/releases),
as well as the [source code](https://github.com/enso-org). See the
@ -64,11 +65,13 @@ as well as the [source code](https://github.com/enso-org). See the
more.

Currently, to start the IDE you have to run **Enso Project Manager** first. For
more information and packages see the
[Enso repository](https://github.com/enso-org/enso).

<br/>

### Building

The project builds on MacOS, Linux, and Windows. Simply run `node ./run build`
to build it and use `node ./run --help` to learn about other available commands
and options. Read the detailed [development guide](docs/CONTRIBUTING.md) to
@ -77,18 +80,20 @@ learn more.
<br/>

### License

The Enso Language Compiler is released under the terms of the
[Apache v2 License](https://github.com/enso-org/enso/blob/main/LICENSE). The
Enso Graphical Interface and its rendering engine are released under the terms
of the [AGPL v3 License](https://github.com/enso-org/ide/blob/main/LICENSE).
This license set was chosen to both provide you with complete freedom to use
Enso, create libraries, and release them under any license of your choice, while
also allowing us to release commercial products on top of the platform,
including Enso Cloud and Enso Enterprise on-premise server managers.

<br/>

### Contributing

Enso is a community-driven open source project which is and will always be open
and free to use. We are committed to a fully transparent development process and
highly appreciate every contribution. If you love the vision behind Enso and you


@ -7,10 +7,11 @@ order: 4
---

# Security Policy

This document outlines the security policy for Enso and its libraries.

> **If you believe that you have found a vulnerability in Enso or one of its
> libraries, please see the section on
> [reporting a vulnerability](#reporting-a-vulnerability) below.**

<!-- MarkdownTOC levels="2" autolink="true" -->
@ -21,16 +22,18 @@ This document outlines the security policy for Enso and its libraries.
<!-- /MarkdownTOC -->

## Supported Versions

Security updates for Enso are provided for the versions shown below with a
:white_check_mark: next to them. No other versions have security updates
provided.

| Version     | Supported          |
| ----------- | ------------------ |
| `main@HEAD` | :white_check_mark: |
| `wip/*`     | :x:                |

## Reporting a Vulnerability

If you believe that you've found a security vulnerability in the Enso codebase
or one of the libraries maintained in this repository, please contact
[security@enso.org](mailto:security@enso.org) and provide details of the bug.


@ -52,10 +52,14 @@ function job_on_macos(...args) {
    return job(["macOS-latest"],...args)
}

-function job_on_linux(...args) {
+function job_on_ubuntu(...args) {
    return job(["ubuntu-latest"],...args)
}

+function job_on_ubuntu_18_04(...args) {
+    return job(["ubuntu-18.04"],...args)
+}
+
function list(...args) {
    let out = []
    for (let arg of args) {
@ -128,7 +132,7 @@ function installWasmPackOn(name,sys,pkg) {
            mv $WASMPACKDIR/wasm-pack ~/.cargo/bin
            rm -r $WASMPACKDIR`,
        shell: "bash",
-       if: `matrix.os == '${sys}-latest'`,
+       if: `startsWith(matrix.os,'${sys}')`,
    }
}
@ -149,7 +153,7 @@ function buildOn(name,sys) {
    return {
        name: `Build (${name})`,
        run: `node ./run dist --skip-version-validation --target ${name}`,
-       if: `matrix.os == '${sys}-latest'`
+       if: `startsWith(matrix.os,'${sys}')`
    }
}
@ -157,6 +161,11 @@ buildOnMacOS = buildOn('macos','macos')
buildOnWindows = buildOn('win','windows')
buildOnLinux = buildOn('linux','ubuntu')

+let lintMarkdown = {
+    name: "Lint Markdown sources",
+    run: "npx prettier --check '*.md'",
+}
+
let lintJavaScript = {
    name: "Lint JavaScript sources",
    run: "npx prettier --check 'src/**/*.js'",
@ -190,7 +199,7 @@ let uploadContentArtifacts = {
        name: 'content',
        path: `dist/content`
    },
-   if: `matrix.os == 'macOS-latest'`
+   if: `startsWith(matrix.os,'macOS')`
}

function uploadBinArtifactsFor(name,sys,ext,os) {
@ -201,7 +210,7 @@ function uploadBinArtifactsFor(name,sys,ext,os) {
            name: `enso-${os}-\${{fromJson(steps.changelog.outputs.content).version}}.${ext}`,
            path: `dist/client/enso-${os}-\${{fromJson(steps.changelog.outputs.content).version}}.${ext}`
        },
-       if: `matrix.os == '${sys}-latest'`
+       if: `startsWith(matrix.os,'${sys}')`
    }
}
@ -275,20 +284,27 @@ let assertChangelogWasUpdated = [
// === GitHub Release ===
// ======================

-let uploadGitHubRelease = {
-    name: `Upload GitHub Release`,
-    uses: "softprops/action-gh-release@v1",
-    env: {
-        GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
-    },
-    with: {
-        files: "artifacts/**/enso-*",
-        name: "Enso ${{fromJson(steps.changelog.outputs.content).version}}",
-        tag_name: "v${{fromJson(steps.changelog.outputs.content).version}}",
-        body: "${{fromJson(steps.changelog.outputs.content).body}}",
-        prerelease: "${{fromJson(steps.changelog.outputs.content).prerelease}}",
-    },
-}
+let uploadGitHubRelease = [
+    installPrettier,
+    {
+        name: `Pretty print changelog.`,
+        run: "npx prettier --prose-wrap never CHANGELOG.md --write"
+    },
+    {
+        name: `Upload GitHub Release`,
+        uses: "softprops/action-gh-release@v1",
+        env: {
+            GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
+        },
+        with: {
+            files: "artifacts/**/enso-*",
+            name: "Enso ${{fromJson(steps.changelog.outputs.content).version}}",
+            tag_name: "v${{fromJson(steps.changelog.outputs.content).version}}",
+            body: "${{fromJson(steps.changelog.outputs.content).body}}",
+            prerelease: "${{fromJson(steps.changelog.outputs.content).prerelease}}",
+        },
+    }
+]
@ -355,6 +371,7 @@ let assertReleaseDoNotExists = [
    {
        name: 'Fail if release already exists',
        run: 'if [[ ${{ steps.checkCurrentReleaseTag.outputs.exists }} == true ]]; then exit 1; fi',
+       if: `github.base_ref == 'unstable' || github.base_ref == 'stable'`
    }
]
@ -407,6 +424,7 @@ let workflow = {
            installRust,
            installPrettier,
            installClippy,
+           lintMarkdown,
            lintJavaScript,
            lintRust
        ]),
@ -451,7 +469,7 @@ let workflow = {
        ],{ if:releaseCondition,
            needs:['version_assertions','lint','test','wasm-test','build']
        }),
-       release_to_cdn: job_on_linux("CDN Release", [
+       release_to_cdn: job_on_ubuntu_18_04("CDN Release", [
            downloadArtifacts,
            getCurrentReleaseChangelogInfo,
            prepareAwsSessionCDN,


@ -1,392 +1,391 @@
# Enso App Framework

## Overview

Enso App Framework is a fully featured framework for building modern, blazing
fast web applications in the Rust programming language. It comes batteries
included, containing:

- **[Enso Canvas], a WebGL-based vector shapes rendering engine.** It is
  blazing-fast, pixel-perfect, uses high-quality computational anti-aliasing,
  allows _almost zero-cost_ boolean operations on shapes, and uses a
  sophisticated Lab CIECH color management system for unparalleled results.
- **[Enso Signals], a [functional reactive programming] signal processing
  engine** designed exclusively for the needs of efficient GUI programming and
  optimized for Rust semantics.
- [Enso GUI], a rich set of modern GUI components, including an iOS-like mouse
  cursor.

EnsoGL is a blazing fast vector rendering engine that comes batteries included.
It was developed as part of the [Enso](https://github.com/enso-org/enso)
project.
## Demo

See the demo videos of [Enso](https://github.com/enso-org/enso) to see an
example application based on EnsoGL.

## Features

### High performance and small size

- **No garbage collector.** EnsoGL is written in Rust. All memory management is
  static, there is no garbage collection needed, and thus, you can be sure that
  your creations will run 60 frames per second without unexpected hiccups.
- **Small binary size.** EnsoGL is a very feature-rich library; it includes all
  aspects needed to build fully featured, production-ready applications,
  including a rich set of GUI elements, an animation engine, a user event
  processing engine, keyboard shortcut management, mouse gesture management,
  and even a dedicated theme resolution engine. Even so,
  [Enso](https://github.com/enso-org/enso), which naturally uses EnsoGL for all
  client-side logic, weighs less than 4Mb in a production build.
### Vector Shapes

- **Highest anti-aliasing quality possible.** The shapes are always smooth and
  crisp. They are described using mathematical equations and do not use
  triangle-based approximation, nor are they interpolated in any way. For
  example, after subtracting two circles, no matter how much you scale the
  resulting shape, it will always render smooth, crisp, and without any visual
  glitches and imperfections. It's worth noting that EnsoGL uses [Signed
  Distance Functions][sdf] to describe shapes and perform anti-aliasing, and
  thus does not need
- **Pixel perfect.** Shapes align perfectly with the pixels on the screen.
  Rendering a rectangle with an integer position will not produce any
  anti-aliased borders.
- **Rich set of primitive shapes.** Including a circle, a rectangle, a
  rectangle with rounded corners, a triangle, a line, a bezier curve, and many
  more. You can also define your own shapes by using [Signed Distance
  Functions][sdf].
- **Blazing fast boolean operations on shapes.** EnsoGL allows performing
  boolean operations on shapes, including subtracting shapes, finding the
  common part of two shapes, and even merging shapes with rounded intersection
  areas (bevels). All these operations are very fast and do not depend on the
  shapes' complexity. Subtracting two circles is as fast as subtracting two
  shapes built out of 100 circles each (see the sketch after this list).
- **Infinite amount of symbol instancing.** EnsoGL supports rendering an
  infinite amount of shape instances at close-to-zero performance cost (a cost
  of a few GPU cycles for all instances altogether). The instancing is done by
  folding the used coordinate system into cyclic space.
- **Lab CIECH color space based color management.** EnsoGL uses Lab CIECH color
  blending in order to output color blending results. Unlike the HTML and CSS
  implementations in all popular browsers nowadays, EnsoGL does not produce
  [visual artifacts when blending colors together][blending in browsers].
- **Various coordinate systems.** EnsoGL supports various coordinate systems,
  including Cartesian and Polar ones. You can freely switch between them in
  order to, for example, bend some parts of the shapes around a given point.
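The boolean operations above are cheap because shapes are signed distance fields: union, intersection, and subtraction reduce to `min`/`max` combinations of distance values, independent of how complex the operand shapes are. A minimal, self-contained Rust sketch of the idea (illustrative only; these helpers are not the EnsoGL API):

```rust
/// Signed distance from point `(x, y)` to a circle of radius `r` centered at the origin.
/// Negative inside the shape, positive outside, zero exactly on the boundary.
fn circle(x: f32, y: f32, r: f32) -> f32 {
    (x * x + y * y).sqrt() - r
}

/// Boolean operations on signed distance values: each is a single `min`/`max`,
/// so the cost does not grow with the complexity of the operand shapes.
fn union(a: f32, b: f32) -> f32 { a.min(b) }
fn intersection(a: f32, b: f32) -> f32 { a.max(b) }
fn subtraction(a: f32, b: f32) -> f32 { a.max(-b) }

fn main() {
    // Distance of the point (2, 0) to "big circle minus small circle".
    let big = circle(2.0, 0.0, 3.0);
    let small = circle(2.0, 0.0, 1.0);
    let d = subtraction(big, small);
    println!("signed distance: {}", d); // negative: the point lies inside the ring
    println!("union: {}", union(big, small));
    println!("intersection: {}", intersection(big, small));
}
```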
### Signals

EnsoGL ships with a state-of-the-art [Functional Reactive Programming
(FRP)][frp] event processing system designed exclusively for the needs of GUI
programming and optimized for Rust semantics. FRP systems allow designing even
very complex event dependencies in a static, easy-to-debug way. Unlike the
old-school event-listener based approach, FRP does not cause [callback hell],
nor does it lead to 'spaghetti' code, which is hard to read and extend.
### Animation

EnsoGL delivers a set of lightweight animation engines in the form of a
reactive FRP API. It allows attaching animations to every interface element
simply by plugging an FRP event source into an FRP animation node. For example,
the Inertia Simulator enables physics-based animations of positions and colors,
while allowing the destination values to change at any time with smooth
interpolation between states. The Tween engine does not allow smooth
destination value changes; however, it is so lightweight that you can consider
it non-existent from the performance point of view.
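A rough sketch of the physics-based idea behind such an animation engine: a value chases a retargetable destination through a spring integrator stepped once per frame. The `Spring` type and its constants below are made up for illustration and are not the EnsoGL Inertia Simulator:

```rust
/// A toy spring integrator: `value` chases `target`, and the target can be
/// changed at any time while the motion stays smooth.
struct Spring {
    value: f32,
    velocity: f32,
    target: f32,
    /// Stiffness and damping control how "snappy" the motion feels.
    stiffness: f32,
    damping: f32,
}

impl Spring {
    fn new(value: f32) -> Self {
        Self { value, velocity: 0.0, target: value, stiffness: 120.0, damping: 10.0 }
    }

    /// Advance the simulation by `dt` seconds (called once per frame).
    fn step(&mut self, dt: f32) {
        let force = (self.target - self.value) * self.stiffness - self.velocity * self.damping;
        self.velocity += force * dt;
        self.value += self.velocity * dt;
    }
}

fn main() {
    let mut x = Spring::new(0.0);
    x.target = 100.0;          // e.g. a node was dragged to x = 100
    for _frame in 0..120 {
        x.step(1.0 / 60.0);    // 60 FPS simulation step
    }
    println!("x after 2 seconds: {}", x.value); // close to 100.0
}
```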
- Mixing HTML elements
### Modern GUI Components

### Built-in performance statistics
# Rendering Architecture

https://www.nomnoml.com :

```ignore
#zoom: 0.6
#gutter:100
#padding: 14
#leading: 1.4
#spacing: 60
#edgeMargin:5
#arrowSize: 0.8
#fill: #FFFFFF; #fdf6e3
#background: #FFFFFF
#.usr: visual=roundrect title=bold stroke=rgb(237,80,80)
#.dyn: visual=roundrect title=bold dashed
#.cpu: visual=roundrect title=bold
#.gpu: stroke=rgb(68,133,187) visual=roundrect

[<gpu> Buffer]
[<gpu> WebGL Context]
[<cpu> AttributeScope]
[<cpu> Attribute]
[<cpu> Mesh]
[<cpu> Material]
[<cpu> Symbol]
[<cpu> SymbolRegistry]
[<cpu> World]
[<cpu> Scene]
[<cpu> View]
[<cpu> SpriteSystem]
[<cpu> Sprite]
[<cpu> ShapeSystem]
[<dyn> ShapeView]
[<usr> *Shape]
[<usr> *ShapeSystem]
[<usr> *Component]
[<cpu> Application]

[AttributeScope] o- [Buffer]
[Buffer] o-- [Attribute]
[Mesh]* o- 4[AttributeScope]
[Symbol]* o- [Mesh]
[Symbol]* o- [Material]
[SymbolRegistry] o- [Symbol]
[Scene] - [SymbolRegistry]
[Scene] o- [View]
[Scene] - [WebGL Context]
[SpriteSystem] o- [Symbol]
[SpriteSystem] o-- [Sprite]
[ShapeSystem] o- [SpriteSystem]
[Sprite] o- [Symbol]
[Sprite] o- [Attribute]
[*Shape] o- [Sprite]
[*ShapeSystem] o- [ShapeSystem]
[*ShapeSystem] o-- [*Shape]
[*Component] o- [ShapeView]
[ShapeView] - [*Shape]
[View] o- [Symbol]
[View] o- [*ShapeSystem]
[World] o- [Scene]
[Application] - [World]
[Application] o- [*Component]
```

# Shapes Rendering
## The Current Architecture

The current implementation uses instanced rendering to display shapes. First, a
simple rectangular geometry is defined, and for each new instance, a new
attribute is added to the list of attached attribute arrays. During rendering,
we use the `draw_arrays_instanced` WebGL call to iterate over the arrays and
draw each shape. The shape placement is done from within its vertex shader.

See the documentation of [`crate::system::gpu::data::Buffer`]. See the
documentation of [`crate::system::gpu::data::Attribute`]. See the documentation
of [`crate::system::gpu::data::AttributeScope`].
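For reference, this is roughly what the instanced draw described above looks like when talking to WebGL2 directly from Rust via the `web-sys` crate (an assumption for illustration; EnsoGL wraps this in the `Buffer`/`Attribute` abstractions referenced above, and `web-sys` needs the corresponding cargo features enabled):

```rust
use web_sys::WebGl2RenderingContext as Gl;

/// Draw `instance_count` copies of a 4-vertex rectangle in a single call.
/// Per-instance data (position, size, shape parameters) lives in attribute
/// arrays whose divisor is 1, so each instance reads the next array entry.
fn draw_shapes(gl: &Gl, instance_attrib_location: u32, instance_count: i32) {
    // Advance this attribute once per instance instead of once per vertex.
    gl.vertex_attrib_divisor(instance_attrib_location, 1);
    // 4 vertices of the base rectangle, repeated `instance_count` times on the GPU.
    gl.draw_arrays_instanced(Gl::TRIANGLE_STRIP, 0, 4, instance_count);
}
```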
### Known Issues / Ideas of Improvement

The current architecture is very efficient at shapes rendering, which comes with
a few limitations. Below, several other architectures are described with their
own gains and problems, and we should consider improving the current approach in
the future. However, keep in mind that the listed limitations allow for a very
fast rendering pipeline, so it's questionable whether we would like to ever
change it.

The most significant limitations of the current approach are:

- No possibility to depth-sort the shape instances. The used
  `draw_arrays_instanced` WebGL draw call iterates over all attrib arrays and
  draws a new instance for each entry. There is no possibility to specify the
  iteration order, while re-ordering the attrib arrays can be CPU heavy (with a
  big instance count) and would require re-sending a big amount of data between
  CPU and GPU (e.g. moving the top-most instance to the bottom would require
  moving its attribs in all attached attrib arrays from the last position to the
  front, and thus, re-sending ALL attrib arrays to the GPU (for ALL INSTANCES)).
- No efficient memory management. In case an instance with a high ID exists and
  many instances with lower IDs are already destroyed, the memory of the
  destroyed instances cannot be freed. This is because currently the sprite
  instances remember the ID (wrapper over usize) of the instance, which is used
  as the attrib array index. Thus, it is impossible to update the number in all
  sprite instances in memory, and sort the instances to move the destroyed ones
  to the end of the buffer to free it. This could be easily solved by using
  `Rc<Cell<ID>>` instead (see the sketch after this list); however, it is
  important to benchmark how big a performance impact this will cause. Also,
  other architectures may provide alternative solutions.
- No possibility to render shape instances using different cameras (in separate
  draw calls). Currently, the shape instances are drawn with the
  `draw_arrays_instanced` WebGL draw call. This API allows drawing all instances
  at once, so it is not possible to draw only some subset of them, and thus, it
  is not possible to update the view-matrix uniform between the calls. OpenGL
  4.2 introduced a specialized draw call that would solve this issue entirely;
  however, it is not accessible from within WebGL
  ([glDrawArraysInstancedBaseInstance](https://www.khronos.org/registry/OpenGL-Refpages/gl4/html/glDrawArraysInstancedBaseInstance.xhtml)).
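As referenced in the memory-management point above, here is a sketch of the `Rc<Cell<ID>>` idea: every clone of an instance handle shares one cell holding the attribute-array index, so compacting or re-sorting the buffer only needs to update that cell. Types and names are hypothetical, not EnsoGL internals:

```rust
use std::cell::Cell;
use std::rc::Rc;

/// An instance handle that shares its attribute-array index through `Rc<Cell<usize>>`.
#[derive(Clone)]
struct InstanceId {
    index: Rc<Cell<usize>>,
}

struct AttribBuffer {
    data: Vec<f32>,       // one attribute value per instance (simplified)
    ids: Vec<InstanceId>, // handles pointing back into `data`
}

impl AttribBuffer {
    /// Remove the instance at `i` by swapping the last instance into its slot,
    /// then fix up the moved instance's shared index.
    fn swap_remove(&mut self, i: usize) {
        self.data.swap_remove(i);
        let removed = self.ids.swap_remove(i);
        removed.index.set(usize::MAX); // mark the removed handle as dead
        if let Some(moved) = self.ids.get(i) {
            moved.index.set(i); // every clone of `moved` now sees the new slot
        }
    }
}

fn main() {
    let ids: Vec<InstanceId> =
        (0..3).map(|i| InstanceId { index: Rc::new(Cell::new(i)) }).collect();
    let handle_to_last = ids[2].clone();
    let mut buffer = AttribBuffer { data: vec![0.0, 1.0, 2.0], ids };
    buffer.swap_remove(0);
    // The instance that previously lived at index 2 was moved to slot 0:
    assert_eq!(handle_to_last.index.get(), 0);
}
```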
### Depth-sorting, memory cleaning, and indexes re-using.

The current approach, however, doesn't allow us to depth-sort the shape
instances. Also, it does not allow for efficient memory management in case an
instance with a high ID exists and many instances with lower IDs are already
destroyed. This section describes possible alternative architectures and
compares them from this perspective.

There are several possible implementation architectures for attribute
management. The currently used architecture may not be the best one, but the
choice is not obvious and would require complex benchmarking. However, let's
compare the available architectures and list their good and bad sides:
#### A. Drawing instanced geometry (the current architecture).

- Rendering. Very fast. May not be as fast as some of the other methods, but
  that may not be the case with modern hardware, see:
  https://stackoverflow.com/a/65376034/889902, and also
  https://stackoverflow.com/questions/62537968/using-opengl-instancing-for-rendering-2d-scene-with-object-depths-and-alpha-blen#answer-62538277
- Changing attribute & GPU memory consumption. Very fast and with low memory
  consumption. Requires only 1 WebGL call (attribute per instance).
- Visual sorting of instances (depth management). Complex. Requires sorting of
  all attribute buffers connected with a particular instance. For big buffers
  (many instances) it may require significant CPU -> GPU data upload. For
  example, taking the last element to the front would require shifting all
  attributes in all buffers, which basically would mean uploading all data to
  the GPU from scratch for that particular geometry. Also, this would require
  keeping instance IDs in some kind of `Rc<Cell<usize>>`, as during sorting, the
  instance IDs will change, so all sprites would need to be updated.
#### B. Drawing non-instanced, indexed geometry.

- Rendering. Very fast. May be faster than architecture (A). See its
  description to learn more.
- Changing attribute & GPU memory consumption. 4 times slower and 4 times more
  memory hungry than architecture (A). Requires setting each attribute for each
  vertex (4 WebGL calls). During drawing, vertexes are re-used by using indexed
  geometry rendering.
- Visual sorting of instances (depth management). The same issues as in
  architecture (A). Even more CPU -> GPU heavy, as the attribute count is
  bigger.
#### C. Drawing non-instanced, non-indexed geometry. Using indexing for sorting.

- Rendering. Very fast. May be faster than architecture (A). See its
  description to learn more.
- Changing attribute & GPU memory consumption. 6 times slower and 6 times more
  memory hungry than architecture (A). Requires setting each attribute for each
  vertex (6 WebGL calls). During drawing, vertexes are not re-used, and thus we
  need to set attributes for each vertex of each triangle.
- Visual sorting of instances (depth management). Simple. We can re-use the
  index buffer to sort the geometry by telling the GPU in what order it should
  render each of the vertexes. Unlike previous architectures, this would not
  require creating any more internally mutable state regarding attribute index
  management (the indexes will not change during sorting).

  However, sorting for the needs of memory compression (removing unused memory
  for sparse attrib arrays) would still require re-uploading sorted data to the
  GPU, just as in architecture (A).
#### C. Drawing non-instanced, non-indexed geometry. Using indexing for sorting. #### D. Keeping all attribute values in a texture and passing index buffer to the shader.
- Rendering. This is a very different architecture to what is currently implemented and might
Very fast. May be faster than architecture (A). See it's description to learn more. require very complex refactoring in order to be even tested and benchmarked
properly. To learn more about the idea, follow the link:
https://stackoverflow.com/a/65376034/889902.
- Changing attribute & GPU memory consumption. - Rendering. Fast. May be slower than architecture (A). Needs real benchmarks.
6 times slower and 6 times more memory hungry than architecture (A). Requires setting each
attribute for each vertex (6 WebGL calls). During drawing, vertexes are not re-used, and thus
we need to set attributes for each vertex of each triangle.
- Visual sorting of instances (depth management). - Changing attribute & GPU memory consumption. Changing attribute would require
Simple. We can re-use index buffer to sort the geometry by telling GPU in what order it 2 WebGL calls: the `bindTexture`, and `texParameterf` (or similar).
should render each of the vertexes. Unlike previous architectures, this would not require to Performance of this solution is questionable, but in real life, it may be as
create any more internally mutable state regarding attribute index management (the indexes fast as architecture (A). The memory consumption should be fine as well, as
will not change during sorting). WebGL textures behave like C++ Vectors, so even if we allocate the texture of
max size, it will occupy only the needed space. This will also limit the
number of instances on the stage, but the limit will be big enough (assuming
max texture od 2048px x 2048px and 20 float attributes per shader, this will
allow us to render over 200 000 shapes). Also, this architecture would allow
us to pass more attributes to shaders than it is currently possible, which on
the other hand, would probably negatively affect the fragment shader
performance.
However, sorting for the needs of memory compression (removing unused memory for sparse - Visual sorting of instances (depth management). Simple. Next to the attribute
attrib arrays) would still require re-uploading sorted data to GPU, just as in architecture texture, we can pass index buffer to the shader, which will dictate what
(A). initial offset in the texture should be used. This would allow for the fastest
sorting mechanism of all of the above architectures.
However, sorting for the needs of memory compression (removing unused memory
for sparse attrib arrays) would still require re-uploading sorted data to GPU,
just as in architecture (A).
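
A minimal sketch of that sorting step, under the assumption that every quad
owns 6 consecutive, non-shared vertices: only the index buffer is rebuilt,
while all attribute buffers stay untouched.

```rust
/// Rebuilds the index buffer so that quads are drawn back-to-front. `depths[i]` is the
/// depth of quad `i`, and quad `i` owns vertices `i * 6 .. i * 6 + 6` in the attribute
/// buffers (non-indexed layout, two triangles per quad).
fn sorted_indices(depths: &[f32]) -> Vec<u32> {
    let mut order: Vec<usize> = (0..depths.len()).collect();
    order.sort_by(|a, b| depths[*a].partial_cmp(&depths[*b]).unwrap());
    let mut indices = Vec::with_capacity(depths.len() * 6);
    for quad in order {
        let base = (quad * 6) as u32;
        indices.extend(base..base + 6);
    }
    indices
}
```
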

#### D. Keeping all attribute values in a texture and passing index buffer to the shader.

This is a very different architecture to what is currently implemented and
might require very complex refactoring in order to be even tested and
benchmarked properly. To learn more about the idea, follow the link:
https://stackoverflow.com/a/65376034/889902.

- Rendering. Fast. May be slower than architecture (A). Needs real benchmarks.
- Changing attribute & GPU memory consumption. Changing an attribute would
  require 2 WebGL calls: the `bindTexture`, and `texParameterf` (or similar).
  Performance of this solution is questionable, but in real life, it may be as
  fast as architecture (A). The memory consumption should be fine as well, as
  WebGL textures behave like C++ vectors, so even if we allocate the texture of
  max size, it will occupy only the needed space. This will also limit the
  number of instances on the stage, but the limit will be big enough (assuming
  a max texture of 2048px x 2048px and 20 float attributes per shader, this
  will allow us to render over 200 000 shapes; see the sketch below). Also,
  this architecture would allow us to pass more attributes to shaders than is
  currently possible, which, on the other hand, would probably negatively
  affect the fragment shader performance.
- Visual sorting of instances (depth management). Simple. Next to the
  attribute texture, we can pass an index buffer to the shader, which will
  dictate what initial offset in the texture should be used. This would allow
  for the fastest sorting mechanism of all of the above architectures.
  However, sorting for the needs of memory compression (removing unused memory
  for sparse attrib arrays) would still require re-uploading sorted data to the
  GPU, just as in architecture (A).
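
To make the capacity claim above concrete, a small back-of-the-envelope sketch
(the layout assumption is ours: a square float texture with one attribute
component per texel):

```rust
/// Rough instance limit for architecture (D): one float attribute component per texel.
fn max_instances(texture_side: usize, floats_per_instance: usize) -> usize {
    (texture_side * texture_side) / floats_per_instance
}

fn main() {
    // 2048 * 2048 / 20 = 209_715, i.e. "over 200 000 shapes" as stated above. Packing
    // four components into each RGBA texel would raise this limit roughly fourfold.
    println!("{}", max_instances(2048, 20));
}
```
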

#### E. Using the depth-buffer for sorting.

As with architecture (D), this is a very different architecture to what is
currently implemented and might require very complex refactoring in order to
be even tested and benchmarked properly. This architecture, however, is the
most common architecture among all WebGL / OpenGL applications, but it is not
really well suited to SDF-based shape rendering, as it requires anti-aliasing
to be done by multisampling, which is not needed with SDF-based rasterization.
It lowers the quality and drastically increases the rendering time (in the
case of 4x4 multisampling, the rendering time is 16x longer than that of
architecture (A)).

There is one additional thing to consider here, namely, with some browsers,
systems, and GPU combinations, super-sampling anti-aliasing is not accessible
in WebGL. In such situations we could use post-processing anti-aliasing
techniques, such as [FXAA][1] or [SMAA][2], however, the resulting image
quality will be even worse. We could also use custom multi-sampled render
buffers for implementing [multi-sampled depth buffers][3].

[1] https://github.com/mitsuhiko/webgl-meincraft/blob/master/assets/shaders/fxaa.glsl
[2] http://www.iryoku.com/papers/SMAA-Enhanced-Subpixel-Morphological-Antialiasing.pdf
[3] https://stackoverflow.com/questions/50613696/whats-the-purpose-of-multisample-renderbuffers

- Rendering. May be 9x - 16x slower than architecture (A), depending on the
  multi-sampling level. Also, the final image quality and edge sharpness will
  be lower. There is, however, an open question whether there is an
  SDF-suitable depth-buffer sorting technique which would not cause such
  downsides (maybe involving an SDF-based depth buffer). Currently, we don't
  know of any such technique.
- Changing attribute & GPU memory consumption. Fast with low memory
  consumption. The same as with architecture (A), (B), or (C).
- Visual sorting of instances (depth management). Simple and fast. Much faster
  than any other architecture listed before, as it does not require upfront
  CPU-side buffer sorting.

#### F. Using depth-peeling / dual depth-peeling algorithms.

As with architecture (D), this is a very different architecture to what is
currently implemented and might require very complex refactoring in order to
be even tested and benchmarked properly. The idea is to render the scene
multiple times, as long as some objects do overlap, by "peeling" the top-most
(and bottom-most) layers every time (see the sketch after the list below). See
the [Interactive Order-Independent Transparency][1], the [Order Independent
Transparency with Dual Depth Peeling][2], and the [sample WebGL
implementation][3] to learn more.

[1] https://my.eng.utah.edu/~cs5610/handouts/order_independent_transparency.pdf
[2] http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.193.3485&rep=rep1&type=pdf
[3] https://medium.com/@shrekshao_71662/dual-depth-peeling-implementation-in-webgl-11baa061ba4b

- Rendering. May be several times slower than architecture (A) due to the need
  to render the scene by peeling components. However, in contrast to
  architecture (E), the final image quality should be as good as with
  architecture (A), (B), or (C).
- Changing attribute & GPU memory consumption. Fast with low memory
  consumption. The same as with architecture (A), (B), or (C).
- Visual sorting of instances (depth management). Simple and fast. As fast as
  architecture (E), as it does not require upfront CPU-side buffer sorting.
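
To make the peeling idea concrete, a CPU-side, single-pixel sketch of the
algorithm (purely illustrative; a real implementation runs this per fragment on
the GPU using depth textures):

```rust
/// CPU-side illustration of depth peeling for one pixel: in every pass the nearest
/// not-yet-peeled fragment is extracted and blended under the accumulated color.
/// `fragments` are `(depth, rgba)` pairs; `rgba` components are premultiplied by alpha.
fn peel_pixel(fragments: &[(f32, [f32; 4])]) -> [f32; 4] {
    let mut result = [0.0, 0.0, 0.0, 0.0]; // front-to-back accumulation buffer
    let mut peel_depth = f32::NEG_INFINITY; // depth of the last peeled layer
    loop {
        // The "render pass": find the nearest fragment strictly behind `peel_depth`.
        let next = fragments
            .iter()
            .filter(|(depth, _)| *depth > peel_depth)
            .min_by(|a, b| a.0.partial_cmp(&b.0).unwrap());
        match next {
            None => return result,
            Some((depth, color)) => {
                // "Under" blending: the new layer only fills what is still transparent.
                let remaining = 1.0 - result[3];
                for i in 0..4 {
                    result[i] += remaining * color[i];
                }
                peel_depth = *depth;
            }
        }
    }
}
```
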
View File
@ -1,14 +1,10 @@
## Spaces

- **Object Space**
  Local object coordinates.
- **World Space `(world_matrix * object_space)`**
  The position relative to the origin of the world (point `(0,0)` below).
  <img width="400" src="https://user-images.githubusercontent.com/1623053/85816645-37e00280-b76c-11ea-9831-e6ae7378830e.png"/>
- **Eye Space**
  The position relative to the placement of the camera.

@ -16,13 +12,16 @@

  ```rust
  let eye_space = view_matrix * world_space;
  ```

  <img width="400" src="https://user-images.githubusercontent.com/1623053/85816908-d40a0980-b76c-11ea-8be6-6c982b1d8ce5.png"/>
- **Clip Space**
  The position inside of the Normalized Device Coordinates (NDC) cube. In
  perspective projection, a 3D point in a truncated pyramid frustum (eye
  coordinates) is mapped to the NDC cube: the x-coordinate is mapped from
  `[l,r]` to `[-1,1]`, the y-coordinate from `[b,t]` to `[-1,1]`, and the
  z-coordinate from `[-n,-f]` to `[-1,1]`. Note that the eye coordinates are
  defined in the right-handed coordinate system, but NDC uses the left-handed
  coordinate system. That is, the camera at the origin is looking along the -Z
  axis in eye space, but it is looking along the +Z axis in NDC.

  ```rust
  let clip_space = projection_matrix * eye_space;
  ```
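
  For completeness, a sketch of the step that follows the snippet above (not
  part of the original document; plain component-wise math on the clip-space
  vector): the perspective divide that yields the NDC position.

  ```rust
  // Hypothetical continuation: clip space -> normalized device coordinates (NDC).
  // The perspective divide maps the truncated frustum onto the [-1, 1] cube.
  let ndc_x = clip_space.x / clip_space.w;
  let ndc_y = clip_space.y / clip_space.w;
  let ndc_z = clip_space.z / clip_space.w;
  ```
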
@ -31,7 +30,6 @@
  <img width="600" src="https://user-images.githubusercontent.com/1623053/85817751-22b8a300-b76f-11ea-8f18-f3e78f3139c1.png"/>
  <img width="600" src="https://user-images.githubusercontent.com/1623053/85817783-3e23ae00-b76f-11ea-8972-c90f1eb6ba1e.png"/>

## Examples

```rust
@ -81,9 +79,10 @@ println!("world_space2: {:?}", world_space2);
println!("object_space2: {:?}", object_space2);
```

## Sources

Images and fragments used here are parts of the following articles:

- https://webglfundamentals.org/webgl/lessons/webgl-3d-camera.html
- http://www.songho.ca/opengl/gl_transform.html
- http://www.songho.ca/opengl/gl_projectionmatrix.html
View File
@ -11,18 +11,14 @@ class BubbleVisualization extends Visualization {
        const svgElem = document.createElementNS(xmlns, 'svg')
        svgElem.setAttributeNS(null, 'class', 'vis-svg')
        svgElem.setAttributeNS(null, 'viewBox', 0 + ' ' + 0 + ' ' + width + ' ' + height)
        svgElem.setAttributeNS(null, 'width', '100%')
        svgElem.setAttributeNS(null, 'height', '100%')
        svgElem.setAttributeNS(null, 'transform', 'matrix(1 0 0 -1 0 0)')
        this.dom.appendChild(svgElem)
        data.forEach(data => {
            const bubble = document.createElementNS(xmlns, 'circle')
            bubble.setAttributeNS(null, 'stroke', 'black')
            bubble.setAttributeNS(null, 'fill', 'red')
View File
@ -110,11 +110,7 @@ class GeoMapVisualization extends Visualization {
        const mapElem = document.createElement('div')
        this.mapId = makeId()
        mapElem.setAttributeNS(null, 'id', this.mapId)
        mapElem.setAttributeNS(null, 'style', 'width:' + width + 'px;height: ' + height + 'px;')
        this.dom.appendChild(mapElem)
        this.mapElem = mapElem
    }
@ -126,8 +122,7 @@ class GeoMapVisualization extends Visualization {
        let labelColor = LABEL_LIGHT_COLOR
        let labelOutline = LABEL_LIGHT_OUTLINE
        if (document.getElementById('root').classList.contains('dark-theme')) {
            defaultMapStyle = 'mapbox://styles/enso-org/ckiu0o0in2fpp19rpk0jfvg2s'
            accentColor = DARK_ACCENT_COLOR
            labelBackgroundColor = LABEL_DARK_BACKGROUND
            labelColor = LABEL_DARK_COLOR
@ -229,8 +224,8 @@ class GeoMapVisualization extends Visualization {
    makeScatterLayer() {
        return new deck.ScatterplotLayer({
            data: this.dataPoints,
            getFillColor: d => d.color,
            getRadius: d => d.radius,
            pickable: this.showingLabels,
        })
    }
@ -323,15 +318,11 @@ class GeoMapVisualization extends Visualization {
/**
 * Extract the visualisation data from a full configuration object.
 */
function extractVisualizationDataFromFullConfig(parsedData, preparedDataPoints, accentColor) {
    if (parsedData.type === SCATTERPLOT_LAYER && parsedData.data.length) {
        pushPoints(parsedData.data, preparedDataPoints, accentColor)
    } else if (ok(parsedData.layers)) {
        parsedData.layers.forEach(layer => {
            if (layer.type === SCATTERPLOT_LAYER) {
                let dataPoints = layer.data || []
                pushPoints(dataPoints, preparedDataPoints, accentColor)
@ -345,11 +336,7 @@ function extractVisualizationDataFromFullConfig(
/**
 * Extract the visualisation data from a dataframe.
 */
function extractVisualizationDataFromDataFrame(parsedData, preparedDataPoints, accentColor) {
    const geoPoints = parsedData.df_latitude.map(function (lat, i) {
        const lon = parsedData.df_longitude[i]
        let label = ok(parsedData.df_label) ? parsedData.df_label[i] : undefined
@ -372,17 +359,9 @@ function isDataFrame(data) {
 */
function extractDataPoints(parsedData, preparedDataPoints, accentColor) {
    if (isDataFrame(parsedData)) {
        extractVisualizationDataFromDataFrame(parsedData, preparedDataPoints, accentColor)
    } else {
        extractVisualizationDataFromFullConfig(parsedData, preparedDataPoints, accentColor)
    }
}
@ -394,11 +373,9 @@ function extractDataPoints(parsedData, preparedDataPoints, accentColor) {
 * optionally `radius`, `color` and `label`.
 */
function pushPoints(dataPoints, targetList, accentColor) {
    dataPoints.forEach(geoPoint => {
        let position = [geoPoint.longitude, geoPoint.latitude]
        let radius = isNaN(geoPoint.radius) ? DEFAULT_POINT_RADIUS : geoPoint.radius
        let color = ok(geoPoint.color) ? geoPoint.color : accentColor
        let label = ok(geoPoint.label) ? geoPoint.label : ''
        targetList.push({ position, color, radius, label })
@ -412,7 +389,7 @@ function pushPoints(dataPoints, targetList, accentColor) {
function calculateExtent(dataPoints) {
    const xs = []
    const ys = []
    dataPoints.forEach(e => {
        xs.push(e.position[0])
        ys.push(e.position[1])
    })
View File
@ -4,9 +4,9 @@ loadScript('https://d3js.org/d3.v4.min.js')
loadStyle('https://fontlibrary.org/face/dejavu-sans-mono')
let shortcuts = {
    zoomIn: e => (e.ctrlKey || e.metaKey) && e.key === 'z',
    showAll: e => (e.ctrlKey || e.metaKey) && e.key === 'a',
    debugPreprocessor: e => (e.ctrlKey || e.metaKey) && e.key === 'd',
}
const LABEL_STYLE = 'font-family: DejaVuSansMonoBook; font-size: 10px;'
@ -86,9 +86,7 @@ class Histogram extends Visualization {
        if (isUpdate) {
            this._axisSpec = ok(data.axis) ? data.axis : this._axisSpec
            this._focus = ok(data.focus) ? data.focus : this._focus
            this._dataValues = ok(data.data.values) ? data.data.values : this.data
            this._bins = ok(data.bins) ? data.bins : this._bins
        } else {
            this._axisSpec = data.axis
@ -187,19 +185,13 @@ class Histogram extends Visualization {
            .append('g')
            .attr(
                'transform',
                'translate(' + this.canvas.margin.left + ',' + this.canvas.margin.top + ')'
            )
        this.yAxis = this.svg.append('g').attr('style', LABEL_STYLE)
        this.xAxis = this.svg.append('g').attr('style', LABEL_STYLE)
        this.plot = this.svg.append('g').attr('clip-path', 'url(#hist-clip-path)')
        // Create clip path
        const defs = this.svg.append('defs')
@ -225,7 +217,7 @@ class Histogram extends Visualization {
    }
    initDebugShortcut() {
        document.addEventListener('keydown', e => {
            if (shortcuts.debugPreprocessor(e)) {
                this.setPreprocessor('x -> "[1,2,3,4]"')
                e.preventDefault()
@ -250,10 +242,7 @@ class Histogram extends Visualization {
            let scroll_wheel = 0
            switch (d3.event.type) {
                case 'mousedown':
                    return d3.event.button === right_button || d3.event.button === mid_button
                case 'wheel':
                    return d3.event.button === scroll_wheel
                default:
@ -310,10 +299,7 @@ class Histogram extends Visualization {
        // The brush element must be child of zoom element - this is only way we found to have both
        // zoom and brush events working at the same time. See https://stackoverflow.com/a/59757276 .
        const brushElem = zoom.zoomElem.append('g').attr('class', brushClass).call(brush)
        const self = this
@ -334,7 +320,7 @@ class Histogram extends Visualization {
            self.rescale(self.scale, true)
        }
        const zoomInKeyEvent = event => {
            if (shortcuts.zoomIn(event)) {
                zoomIn()
                endBrushing()
@ -363,9 +349,7 @@ class Histogram extends Visualization {
        }
        let endEvents = ['click', 'auxclick', 'contextmenu', 'scroll']
        endEvents.forEach(e => document.addEventListener(e, endBrushing, false))
    }
    /**
@ -383,7 +367,7 @@ class Histogram extends Visualization {
            .duration(animation_duration)
            .attr(
                'transform',
                d =>
                    'translate(' +
                    scale.x(d.x0) +
                    ',' +
@ -417,10 +401,7 @@ class Histogram extends Visualization {
            }
        }
        const x = d3.scaleLinear().domain(domain_x).range([0, this.canvas.inner.width])
        this.xAxis
            .attr('transform', 'translate(0,' + this.canvas.inner.height + ')')
@ -428,21 +409,18 @@ class Histogram extends Visualization {
        const histogram = d3
            .histogram()
            .value(d => d)
            .domain(x.domain())
            .thresholds(x.ticks(this.binCount()))
        const bins = histogram(dataPoints)
        const y = d3.scaleLinear().range([this.canvas.inner.height, 0])
        y.domain([0, d3.max(bins, d => d.length)])
        const yAxisTicks = y.ticks().filter(tick => Number.isInteger(tick))
        const yAxis = d3.axisLeft(y).tickValues(yAxisTicks).tickFormat(d3.format('d'))
        this.yAxis.call(yAxis)
@ -458,12 +436,9 @@ class Histogram extends Visualization {
            .enter()
            .append('rect')
            .attr('x', 1)
            .attr('transform', d => 'translate(' + x(d.x0) + ',' + y(d.length) + ')')
            .attr('width', d => x(d.x1) - x(d.x0))
            .attr('height', d => this.canvas.inner.height - y(d.length))
            .style('fill', accentColor)
        items.exit().remove()
@ -496,28 +471,15 @@ class Histogram extends Visualization {
        const fontStyle = '10px DejaVuSansMonoBook'
        if (axis.x.label !== undefined) {
            this.xAxisLabel
                .attr('y', canvas.inner.height + canvas.margin.bottom - X_AXIS_LABEL_WIDTH / 2.0)
                .attr('x', canvas.inner.width / 2.0 + this.textWidth(axis.x.label, fontStyle) / 2)
                .text(axis.x.label)
        }
        // Note: y axis is rotated by 90 degrees, so x/y is switched.
        if (axis.y.label !== undefined) {
            this.yAxisLabel
                .attr('y', -canvas.margin.left + Y_AXIS_LABEL_WIDTH)
                .attr('x', -canvas.inner.height / 2 + this.textWidth(axis.y.label, fontStyle) / 2)
                .text(axis.y.label)
        }
    }
@ -546,7 +508,7 @@ class Histogram extends Visualization {
        let xMin = dataPoints[0]
        let xMax = dataPoints[0]
        dataPoints.forEach(value => {
            if (value < xMin) {
                xMin = value
            }
@ -602,11 +564,7 @@ class Histogram extends Visualization {
    createOuterContainerWithStyle(width, height) {
        const divElem = document.createElementNS(null, 'div')
        divElem.setAttributeNS(null, 'class', 'vis-histogram')
        divElem.setAttributeNS(null, 'viewBox', 0 + ' ' + 0 + ' ' + width + ' ' + height)
        divElem.setAttributeNS(null, 'width', '100%')
        divElem.setAttributeNS(null, 'height', '100%')
@ -692,10 +650,7 @@ class Histogram extends Visualization {
        const self = this
        const reset_zoom_and_pan = () => {
            zoom.zoomElem.transition().duration(0).call(zoom.zoom.transform, d3.zoomIdentity)
            let domain_x = [
                extremesAndDeltas.xMin - extremesAndDeltas.paddingX,
@ -707,7 +662,7 @@ class Histogram extends Visualization {
            self.rescale(self.scale, true)
        }
        document.addEventListener('keydown', e => {
            if (shortcuts.showAll(e)) {
                reset_zoom_and_pan()
            }
View File
@ -2,8 +2,8 @@ loadScript('https://d3js.org/d3.v4.min.js')
loadStyle('https://fontlibrary.org/face/dejavu-sans-mono')
let shortcuts = {
    zoomIn: e => (e.ctrlKey || e.metaKey) && e.key === 'z',
    showAll: e => (e.ctrlKey || e.metaKey) && event.key === 'a',
}
const label_style = 'font-family: DejaVuSansMonoBook; font-size: 10px;'
@ -52,10 +52,7 @@ class ScatterPlot extends Visualization {
            this.dom.removeChild(this.dom.lastChild)
        }
        const divElem = this.createDivElem(this.canvasWidth(), this.canvasHeight())
        this.dom.appendChild(divElem)
        let parsedData = this.parseData(data)
@ -67,10 +64,7 @@ class ScatterPlot extends Visualization {
            .attr('width', this.canvasWidth())
            .attr('height', this.canvasHeight())
            .append('g')
            .attr('transform', 'translate(' + this.margin.left + ',' + this.margin.top + ')')
        let extremesAndDeltas = this.getExtremesAndDeltas(this.dataPoints)
        let scaleAndAxis = this.createAxes(
@ -81,13 +75,7 @@ class ScatterPlot extends Visualization {
            svg,
            focus
        )
        this.createLabels(this.axis, svg, this.box_width, this.margin, this.box_height)
        let scatter = this.createScatter(
            svg,
            this.box_width,
@ -148,10 +136,8 @@ class ScatterPlot extends Visualization {
        this.dataPoints = this.extractValues(parsedData)
        this.margin = this.getMargins(this.axis)
        this.box_width = this.canvasWidth() - this.margin.left - this.margin.right
        this.box_height = this.canvasHeight() - this.margin.top - this.margin.bottom
    }
    extractValues(data) {
@ -186,15 +172,7 @@ class ScatterPlot extends Visualization {
    /**
     * Adds panning and zooming functionality to the visualization.
     */
    addPanAndZoom(box_width, box_height, svg, margin, scaleAndAxis, scatter, points) {
        let zoomClass = 'zoom'
        let minScale = 0.5
        let maxScale = 20
@ -207,10 +185,7 @@ class ScatterPlot extends Visualization {
            let scroll_wheel = 0
            switch (d3.event.type) {
                case 'mousedown':
                    return d3.event.button === right_button || d3.event.button === mid_button
                case 'wheel':
                    return d3.event.button === scroll_wheel
                default:
@ -241,27 +216,20 @@ class ScatterPlot extends Visualization {
            let new_xScale = d3.event.transform.rescaleX(scaleAndAxis.xScale)
            let new_yScale = d3.event.transform.rescaleY(scaleAndAxis.yScale)
            scaleAndAxis.xAxis.call(d3.axisBottom(new_xScale).ticks(box_width / x_axis_label_width))
            scaleAndAxis.yAxis.call(d3.axisLeft(new_yScale))
            scatter
                .selectAll('path')
                .attr(
                    'transform',
                    d => 'translate(' + new_xScale(d.x) + ',' + new_yScale(d.y) + ')'
                )
            if (points.labels === visilbe_points) {
                scatter
                    .selectAll('text')
                    .attr('x', d => new_xScale(d.x) + point_label_padding_x)
                    .attr('y', d => new_yScale(d.y) + point_label_padding_y)
            }
        }
@ -275,8 +243,7 @@ class ScatterPlot extends Visualization {
            let current_transform = d3.zoomTransform(scatter)
            let delta_multiplier = 0.01
            if (d3.event.ctrlKey) {
                current_transform.k = current_transform.k - d3.event.deltaY * delta_multiplier
            }
            scatter.attr('transform', current_transform)
        }
@ -290,15 +257,7 @@ class ScatterPlot extends Visualization {
     * Brush is a tool which enables user to select points, and zoom into selection via
     * keyboard shortcut or button event.
     */
    addBrushing(box_width, box_height, scatter, scaleAndAxis, selectedZoomBtn, points, zoom) {
        let extent
        let brushClass = 'brush'
@ -312,10 +271,7 @@ class ScatterPlot extends Visualization {
        // The brush element must be child of zoom element - this is only way we found to have both zoom and brush
        // events working at the same time. See https://stackoverflow.com/a/59757276 .
        let brushElem = zoom.zoomElem.append('g').attr('class', brushClass).call(brush)
        let self = this
@ -337,7 +293,7 @@ class ScatterPlot extends Visualization {
            self.zoomingHelper(scaleAndAxis, box_width, scatter, points)
        }
        const zoomInKeyEvent = event => {
            if (shortcuts.zoomIn(event)) {
                zoomIn()
                endBrushing()
@ -366,9 +322,7 @@ class ScatterPlot extends Visualization {
        }
        let endEvents = ['click', 'auxclick', 'contextmenu', 'scroll']
        endEvents.forEach(e => document.addEventListener(e, endBrushing, false))
    }
    /**
@ -378,11 +332,7 @@ class ScatterPlot extends Visualization {
        scaleAndAxis.xAxis
            .transition()
            .duration(animation_duration)
            .call(d3.axisBottom(scaleAndAxis.xScale).ticks(box_width / x_axis_label_width))
        scaleAndAxis.yAxis
            .transition()
            .duration(animation_duration)
@ -394,12 +344,7 @@ class ScatterPlot extends Visualization {
            .duration(animation_duration)
            .attr(
                'transform',
                d => 'translate(' + scaleAndAxis.xScale(d.x) + ',' + scaleAndAxis.yScale(d.y) + ')'
            )
        if (points.labels === visilbe_points) {
@ -407,28 +352,15 @@ class ScatterPlot extends Visualization {
                .selectAll('text')
                .transition()
                .duration(animation_duration)
                .attr('x', d => scaleAndAxis.xScale(d.x) + point_label_padding_x)
                .attr('y', d => scaleAndAxis.yScale(d.y) + point_label_padding_y)
        }
    }
    /**
     * Creates a plot object and populates it with given data.
     */
    createScatter(svg, box_width, box_height, points, dataPoints, scaleAndAxis) {
        let clip = svg
            .append('defs')
            .append('svg:clipPath')
@ -452,20 +384,13 @@ class ScatterPlot extends Visualization {
            .append('path')
            .attr(
                'd',
                symbol.type(this.matchShape()).size(d => (d.size || 1.0) * sizeScaleMultiplier)
            )
            .attr(
                'transform',
                d => 'translate(' + scaleAndAxis.xScale(d.x) + ',' + scaleAndAxis.yScale(d.y) + ')'
            )
            .style('fill', d => '#' + (d.color || '000000'))
            .style('opacity', 0.5)
        if (points.labels === visilbe_points) {
@ -474,15 +399,9 @@ class ScatterPlot extends Visualization {
                .data(dataPoints)
                .enter()
                .append('text')
                .text(d => d.label)
                .attr('x', d => scaleAndAxis.xScale(d.x) + point_label_padding_x)
                .attr('y', d => scaleAndAxis.yScale(d.y) + point_label_padding_y)
                .attr('style', label_style)
                .attr('fill', 'black')
        }
@ -494,7 +413,7 @@ class ScatterPlot extends Visualization {
     * Helper function to match d3 shape from string.
     */
    matchShape() {
        return d => {
            if (d.shape === 'cross') {
                return d3.symbolCross
            }
@ -524,10 +443,7 @@ class ScatterPlot extends Visualization {
            svg.append('text')
                .attr('text-anchor', 'end')
                .attr('style', label_style)
                .attr('x', margin.left + this.getTextWidth(axis.x.label, fontStyle) / 2)
                .attr('y', box_height + margin.top + padding_y)
                .text(axis.x.label)
        }
@ -541,9 +457,7 @@ class ScatterPlot extends Visualization {
                .attr('y', -margin.left + padding_y)
                .attr(
                    'x',
                    -margin.top - box_height / 2 + this.getTextWidth(axis.y.label, fontStyle) / 2
                )
                .text(axis.y.label)
        }
@ -585,10 +499,7 @@ class ScatterPlot extends Visualization {
        }
        yScale.domain(domain_y).range([box_height, 0])
        let yAxis = svg.append('g').attr('style', label_style).call(d3.axisLeft(yScale))
        return { xScale: xScale, yScale: yScale, xAxis: xAxis, yAxis: yAxis }
    }
@ -611,11 +522,7 @@ class ScatterPlot extends Visualization {
        ]
        if (focus !== undefined) {
            if (focus.x !== undefined && focus.y !== undefined && focus.zoom !== undefined) {
                let padding_x = extremesAndDeltas.dx * (1 / (2 * focus.zoom))
                let padding_y = extremesAndDeltas.dy * (1 / (2 * focus.zoom))
                domain_x = [focus.x - padding_x, focus.x + padding_x]
@ -638,7 +545,7 @@ class ScatterPlot extends Visualization {
        let yMin = dataPoints[0].y
        let yMax = dataPoints[0].y
        dataPoints.forEach(d => {
            if (d.x < xMin) {
                xMin = d.x
            }
@ -691,11 +598,7 @@ class ScatterPlot extends Visualization {
    createDivElem(width, height) {
        const divElem = document.createElementNS(null, 'div')
        divElem.setAttributeNS(null, 'class', 'vis-scatterplot')
        divElem.setAttributeNS(null, 'viewBox', 0 + ' ' + 0 + ' ' + width + ' ' + height)
        divElem.setAttributeNS(null, 'width', '100%')
        divElem.setAttributeNS(null, 'height', '100%')
        divElem.setAttributeNS(null, 'transform', 'matrix(1 0 0 -1 0 0)')
@ -773,14 +676,7 @@ class ScatterPlot extends Visualization {
    /**
     * Creates a button to fit all points on plot.
     */
    createButtonFitAll(scaleAndAxis, scatter, points, extremesAndDeltas, zoom, box_width) {
        const btn = this.createBtnHelper()
        let text = document.createTextNode('Fit all')
@ -788,10 +684,7 @@ class ScatterPlot extends Visualization {
        let self = this
        const unzoom = () => {
            zoom.zoomElem.transition().duration(0).call(zoom.zoom.transform, d3.zoomIdentity)
            let domain_x = [
                extremesAndDeltas.xMin - extremesAndDeltas.paddingX,
@ -808,7 +701,7 @@ class ScatterPlot extends Visualization {
            self.zoomingHelper(scaleAndAxis, box_width, scatter, points)
        }
        document.addEventListener('keydown', e => {
            if (shortcuts.showAll(e)) {
                unzoom()
            }
View File
@ -28,15 +28,12 @@ class TableVisualization extends Visualization {
    }
    function hasExactlyKeys(keys, obj) {
        return Object.keys(obj).length === keys.length && keys.every(k => obj.hasOwnProperty(k))
    }
    function getAtNestedKey(data, key) {
        let res = data
        key.forEach(k => (res = res[k]))
        return res
    }
@ -48,14 +45,10 @@ class TableVisualization extends Visualization {
        let first = getAtNestedKey(data[0], key)
        if (!(first instanceof Object)) return [key]
        let firstKeys = Object.keys(first)
        let isNestable = data.every(obj => hasExactlyKeys(firstKeys, getAtNestedKey(obj, key)))
        if (isNestable) {
            let withNests = firstKeys.map(k => key.concat([k]))
            let furtherNestings = withNests.map(k => generateNestings(data, k))
            return [].concat.apply([], furtherNestings)
        } else {
            return [key]
@ -66,7 +59,7 @@ class TableVisualization extends Visualization {
        let isList = Array.isArray(data) && data[0]
        if (!isList || !(typeof data[0] === 'object')) return false
        let firstKeys = Object.keys(data[0])
        return data.every(obj => hasExactlyKeys(firstKeys, obj))
    }
    function genObjectMatrix(data, level) {
@ -74,15 +67,15 @@ class TableVisualization extends Visualization {
        let keys = Object.keys(data[0])
        let nests = [].concat.apply(
            [],
            keys.map(k => generateNestings(data, [k]))
        )
        nests.forEach(key => {
            result += '<th>' + repNestedKey(key) + '</th>'
        })
        result += '</tr>'
        data.forEach((row, ix) => {
            result += '<tr><th>' + ix + '</th>'
            nests.forEach(k => {
                result += toTableCell(getAtNestedKey(row, k), level)
            })
            result += '</tr>'
@ -96,7 +89,7 @@ class TableVisualization extends Visualization {
        let firstIsArray = Array.isArray(data[0])
        if (!firstIsArray) return false
        let firstLen = data[0].length
        return data.every(d => d.length === firstLen)
    }
    function genMatrix(data, level, header) {
@ -122,7 +115,7 @@ class TableVisualization extends Visualization {
        table.forEach((row, ix) => {
            result += '<tr><th>' + ix + '</th>'
            row.forEach(d => {
                result += toTableCell(d, level)
            })
            result += '</tr>'
@ -133,12 +126,7 @@ class TableVisualization extends Visualization {
    function genGenericTable(data, level) {
        let result = ''
        data.forEach((point, ix) => {
            result += '<tr><th>' + ix + '</th>' + toTableCell(point, level) + '</tr>'
        })
        return tableOf(result, level)
    }
@ -146,11 +134,11 @@ class TableVisualization extends Visualization {
    function genRowObjectTable(data, level) {
        let keys = Object.keys(data)
        let result = '<tr>'
        keys.forEach(key => {
            result += '<th>' + key + '</th>'
        })
        result += '</tr><tr>'
        keys.forEach(key => {
            result += toTableCell(data[key], level)
        })
        result += '</tr>'
@ -165,11 +153,7 @@ class TableVisualization extends Visualization {
        } else {
            if (data === undefined || data === null) data = ''
            let res = data.toString()
            return '<td class="plaintext">' + (res === '' ? 'N/A' : res) + '</td>'
        }
    }
@ -280,11 +264,7 @@ class TableVisualization extends Visualization {
        if (document.getElementById('root').classList.contains('dark-theme')) {
            style = style_dark
        }
        const table = genTable(parsedData.data || parsedData, 0, parsedData.header)
        tabElem.innerHTML = style + table
    }