Merge branch 'master' into auto-rename-binaries-in-CI-for-Regular-releases

This commit is contained in:
DeeDeeG 2023-08-29 22:28:06 -04:00
commit 5ecd2b8475
139 changed files with 31444 additions and 27961 deletions

View File

@ -3,64 +3,66 @@ env:
GITHUB_TOKEN: ENCRYPTED[!b0ff4671044672be50914a3a10b49af642bd8e0e681a6f4e5855ec5230a5cf9afbc53d9e90239b8d2c79455f014f383f!]
# The above token, is a GitHub API Token, that allows us to download RipGrep without concern of API limits
linux_task:
alias: linux
container:
image: node:16-slim
memory: 8G
prepare_script:
- apt-get update
- export DEBIAN_FRONTEND="noninteractive"
- apt-get install -y
ffmpeg
rpm
build-essential
git
libsecret-1-dev
fakeroot
libx11-dev
libxkbfile-dev
libgdk-pixbuf2.0-dev
libgtk-3-dev
libxss-dev
libasound2-dev
libnss3
xvfb
- git submodule init
- git submodule update
- sed -i -e "s/[0-9]*-dev/`date -u +%Y%m%d%H`/g" package.json
install_script:
- yarn install --ignore-engines || yarn install --ignore-engines
build_script:
- yarn build
- yarn run build:apm
build_binary_script:
- yarn dist || yarn dist
rename_binary_script:
- node script/rename.js "Linux"
binary_artifacts:
path: ./binaries/*
test_script:
- rm -R node_modules/electron; yarn install --check-files
- ./binaries/*AppImage --appimage-extract
- export BINARY_NAME='squashfs-root/pulsar'
- mkdir -p ./tests/videos
- Xvfb -screen 0 1024x768x24+32 :99 & nohup ffmpeg -video_size 1024x768 -f x11grab -i :99.0 ./tests/videos/out.mpg & DISPLAY=:99 PLAYWRIGHT_JUNIT_OUTPUT_NAME=report.xml npx playwright test --reporter=junit,list
always:
videos_artifacts:
path: ./tests/videos/**
junit_artifacts:
path: report.xml
type: text/xml
format: junit
# linux_task:
# alias: linux
# container:
# image: node:16-slim
# memory: 8G
# prepare_script:
# - apt-get update
# - export DEBIAN_FRONTEND="noninteractive"
# - apt-get install -y
# ffmpeg
# rpm
# build-essential
# git
# libsecret-1-dev
# fakeroot
# libx11-dev
# libxkbfile-dev
# libgdk-pixbuf2.0-dev
# libgtk-3-dev
# libxss-dev
# libasound2-dev
# libnss3
# xvfb
# - git submodule init
# - git submodule update
# - sed -i -e "s/[0-9]*-dev/`date -u +%Y%m%d%H`/g" package.json
# install_script:
# - yarn install --ignore-engines || yarn install --ignore-engines
# build_script:
# - yarn build
# - yarn run build:apm
# build_binary_script:
# - yarn dist || yarn dist
# rename_binary_script:
# - node script/rename.js "Linux"
# binary_artifacts:
# path: ./binaries/*
# test_script:
# - rm -R node_modules/electron; yarn install --check-files
# - ./binaries/*AppImage --appimage-extract
# - export BINARY_NAME='squashfs-root/pulsar'
# - mkdir -p ./tests/videos
# - Xvfb -screen 0 1024x768x24+32 :99 & nohup ffmpeg -video_size 1024x768 -f x11grab -i :99.0 ./tests/videos/out.mpg & DISPLAY=:99 PLAYWRIGHT_JUNIT_OUTPUT_NAME=report.xml npx playwright test --reporter=junit,list
# always:
# videos_artifacts:
# path: ./tests/videos/**
# junit_artifacts:
# path: report.xml
# type: text/xml
# format: junit
arm_linux_task:
alias: linux
only_if: $CIRRUS_CRON != "" || $CIRRUS_TAG == "regular_release"
arm_container:
image: node:16-slim
memory: 8G
env:
USE_SYSTEM_FPM: 'true'
ROLLING_UPLOAD_TOKEN: ENCRYPTED[690950798401ec3715e9d20ac29a0859d3c58097038081ff6afeaf4721e661672d34eb952d8a6442bc7410821ab8545a]
prepare_script:
- apt-get update
- export DEBIAN_FRONTEND="noninteractive"
@ -102,6 +104,10 @@ arm_linux_task:
- ./binaries/*AppImage --appimage-extract
- export BINARY_NAME='squashfs-root/pulsar'
- Xvfb :99 & DISPLAY=:99 PLAYWRIGHT_JUNIT_OUTPUT_NAME=report.xml npx playwright test --reporter=junit,list
rolling_upload_script:
- cd ./script/rolling-release-scripts
- npm install
- node ./rolling-release-binary-upload.js cirrus
always:
videos_artifacts:
path: ./tests/videos/**
@ -112,6 +118,7 @@ arm_linux_task:
silicon_mac_task:
alias: mac
only_if: $CIRRUS_CRON != "" || $CIRRUS_TAG == "regular_release"
macos_instance:
image: ghcr.io/cirruslabs/macos-monterey-xcode:14
memory: 8G
@ -121,6 +128,7 @@ silicon_mac_task:
APPLEID: ENCRYPTED[549ce052bd5666dba5245f4180bf93b74ed206fe5e6e7c8f67a8596d3767c1f682b84e347b326ac318c62a07c8844a57]
APPLEID_PASSWORD: ENCRYPTED[774c3307fd3b62660ecf5beb8537a24498c76e8d90d7f28e5bc816742fd8954a34ffed13f9aa2d1faf66ce08b4496e6f]
TEAM_ID: ENCRYPTED[11f3fedfbaf4aff1859bf6c105f0437ace23d84f5420a2c1cea884fbfa43b115b7834a463516d50cb276d4c4d9128b49]
ROLLING_UPLOAD_TOKEN: ENCRYPTED[690950798401ec3715e9d20ac29a0859d3c58097038081ff6afeaf4721e661672d34eb952d8a6442bc7410821ab8545a]
prepare_script:
- brew install node@16 yarn git python@$PYTHON_VERSION
- git submodule init
@ -149,6 +157,11 @@ silicon_mac_task:
- hdiutil mount binaries/*Pulsar*dmg
- export BINARY_NAME=`ls /Volumes/Pulsar*/Pulsar.app/Contents/MacOS/Pulsar`
- PLAYWRIGHT_JUNIT_OUTPUT_NAME=report.xml npx playwright test --reporter=junit,list
rolling_upload_script:
- export PATH="/opt/homebrew/bin:/opt/homebrew/opt/node@16/bin:$PATH"
- cd ./script/rolling-release-scripts
- npm install
- node ./rolling-release-binary-upload.js cirrus
always:
videos_artifacts:
path: ./tests/videos/**
@ -157,94 +170,94 @@ silicon_mac_task:
type: text/xml
format: junit
intel_mac_task:
alias: mac
macos_instance:
image: ghcr.io/cirruslabs/macos-monterey-xcode:14
memory: 8G
env:
CSC_LINK: ENCRYPTED[0078015a03bb6cfdbd80113ae5bbb6f448fd4bbbc40efd81bf2cb1554373046b475a4d7c77e3e3e82ac1ce2f7e3d2da5]
CSC_KEY_PASSWORD: ENCRYPTED[82bb72653d39578035ed1860ab4978703d50bd326d925a146ff08782f987ceb37ac2d8dbace52dec2b0e2ef92debf097]
APPLEID: ENCRYPTED[549ce052bd5666dba5245f4180bf93b74ed206fe5e6e7c8f67a8596d3767c1f682b84e347b326ac318c62a07c8844a57]
APPLEID_PASSWORD: ENCRYPTED[774c3307fd3b62660ecf5beb8537a24498c76e8d90d7f28e5bc816742fd8954a34ffed13f9aa2d1faf66ce08b4496e6f]
TEAM_ID: ENCRYPTED[11f3fedfbaf4aff1859bf6c105f0437ace23d84f5420a2c1cea884fbfa43b115b7834a463516d50cb276d4c4d9128b49]
prepare_script:
- sudo rm -rf /Library/Developer/CommandLineTools
- echo A | softwareupdate --install-rosetta
- arch -x86_64 xcode-select --install
- arch -x86_64 /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)"
- export PATH="/usr/local/opt/node@16/bin:/usr/local/bin:$PATH"
- arch -x86_64 brew install node@16 yarn git python@$PYTHON_VERSION
- ln -s /usr/local/bin/python$PYTHON_VERSION /usr/local/bin/python
- git submodule init
- git submodule update
- sed -i -e "s/[0-9]*-dev/`date -u +%Y%m%d%H`/g" package.json
install_script:
- export PATH="/usr/local/opt/node@16/bin:/usr/local/bin:$PATH"
- arch -x86_64 npx yarn install --ignore-engines || arch -x86_64 npx yarn install --ignore-engines
build_script:
- export PATH="/usr/local/opt/node@16/bin:/usr/local/bin:$PATH"
- arch -x86_64 npx yarn build
- arch -x86_64 yarn run build:apm
build_binary_script:
- export PATH="/usr/local/opt/node@16/bin:/usr/local/bin:$PATH"
- arch -x86_64 npx yarn dist || arch -x86_64 npx yarn dist
rename_binary_script:
- export PATH="/usr/local/opt/node@16/bin:/usr/local/bin:$PATH"
- node script/rename.js "Intel.Mac"
binary_artifacts:
path: ./binaries/*
test_script:
- export PATH="/usr/local/opt/node@16/bin:/usr/local/bin:$PATH"
- rm -R node_modules/electron; yarn install --check-files
- hdiutil mount binaries/*Pulsar*dmg
- export BINARY_NAME=`ls /Volumes/Pulsar*/Pulsar.app/Contents/MacOS/Pulsar`
- PLAYWRIGHT_JUNIT_OUTPUT_NAME=report.xml arch -x86_64 npx playwright test --reporter=junit,list
always:
videos_artifacts:
path: ./tests/videos/**
junit_artifacts:
path: report.xml
type: text/xml
format: junit
# intel_mac_task:
# alias: mac
# macos_instance:
# image: ghcr.io/cirruslabs/macos-monterey-xcode:14
# memory: 8G
# env:
# CSC_LINK: ENCRYPTED[0078015a03bb6cfdbd80113ae5bbb6f448fd4bbbc40efd81bf2cb1554373046b475a4d7c77e3e3e82ac1ce2f7e3d2da5]
# CSC_KEY_PASSWORD: ENCRYPTED[82bb72653d39578035ed1860ab4978703d50bd326d925a146ff08782f987ceb37ac2d8dbace52dec2b0e2ef92debf097]
# APPLEID: ENCRYPTED[549ce052bd5666dba5245f4180bf93b74ed206fe5e6e7c8f67a8596d3767c1f682b84e347b326ac318c62a07c8844a57]
# APPLEID_PASSWORD: ENCRYPTED[774c3307fd3b62660ecf5beb8537a24498c76e8d90d7f28e5bc816742fd8954a34ffed13f9aa2d1faf66ce08b4496e6f]
# TEAM_ID: ENCRYPTED[11f3fedfbaf4aff1859bf6c105f0437ace23d84f5420a2c1cea884fbfa43b115b7834a463516d50cb276d4c4d9128b49]
# prepare_script:
# - sudo rm -rf /Library/Developer/CommandLineTools
# - echo A | softwareupdate --install-rosetta
# - arch -x86_64 xcode-select --install
# - arch -x86_64 /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)"
# - export PATH="/usr/local/opt/node@16/bin:/usr/local/bin:$PATH"
# - arch -x86_64 brew install node@16 yarn git python@$PYTHON_VERSION
# - ln -s /usr/local/bin/python$PYTHON_VERSION /usr/local/bin/python
# - git submodule init
# - git submodule update
# - sed -i -e "s/[0-9]*-dev/`date -u +%Y%m%d%H`/g" package.json
# install_script:
# - export PATH="/usr/local/opt/node@16/bin:/usr/local/bin:$PATH"
# - arch -x86_64 npx yarn install --ignore-engines || arch -x86_64 npx yarn install --ignore-engines
# build_script:
# - export PATH="/usr/local/opt/node@16/bin:/usr/local/bin:$PATH"
# - arch -x86_64 npx yarn build
# - arch -x86_64 yarn run build:apm
# build_binary_script:
# - export PATH="/usr/local/opt/node@16/bin:/usr/local/bin:$PATH"
# - arch -x86_64 npx yarn dist || arch -x86_64 npx yarn dist
# rename_binary_script:
# - export PATH="/usr/local/opt/node@16/bin:/usr/local/bin:$PATH"
# - node script/rename.js "Intel.Mac"
# binary_artifacts:
# path: ./binaries/*
# test_script:
# - export PATH="/usr/local/opt/node@16/bin:/usr/local/bin:$PATH"
# - rm -R node_modules/electron; yarn install --check-files
# - hdiutil mount binaries/*Pulsar*dmg
# - export BINARY_NAME=`ls /Volumes/Pulsar*/Pulsar.app/Contents/MacOS/Pulsar`
# - PLAYWRIGHT_JUNIT_OUTPUT_NAME=report.xml arch -x86_64 npx playwright test --reporter=junit,list
# always:
# videos_artifacts:
# path: ./tests/videos/**
# junit_artifacts:
# path: report.xml
# type: text/xml
# format: junit
windows_task:
alias: windows
timeout_in: 90m
windows_container:
image: cirrusci/windowsservercore:visualstudio2022-2022.06.23
env:
CIRRUS_SHELL: bash
PATH: C:\Python310\Scripts\;C:\Python310\;%PATH%;C:\Program Files\nodejs\;C:\Program Files\Git\cmd;C:\Users\User\AppData\Local\Microsoft\WindowsApps;C:\Users\User\AppData\Roaming\npm;C:\Program Files\Microsoft Visual Studio\2022\Community\Msbuild\Current\Bin\
prepare_script:
- choco install nodejs --version=16.16.0 -y
- choco install python --version=3.10.3 -y
- choco install git visualstudio2019-workload-vctools -y
- git submodule init
- git submodule update
- npm config set python 'C:\Python310\python.exe'
install_script:
- npx yarn install --ignore-engines
|| rm -R node_modules && npx yarn install --ignore-engines
|| rm -R node_modules && npx yarn install --ignore-engines
build_script:
- npx yarn build:apm
- npx yarn build || npx yarn build || npx yarn build
build_binary_script:
- sed -i -e "s/[0-9]*-dev/`date -u +%Y%m%d%H`/g" package.json
- npx yarn dist || npx yarn dist || npx yarn dist
rename_binary_script:
- node script/rename.js "Windows"
binary_artifacts:
path: .\binaries\*
test_script:
- mkdir extracted; tar -xf binaries/*zip -C ./extracted/
- export BINARY_NAME=./extracted/Pulsar.exe
- PLAYWRIGHT_JUNIT_OUTPUT_NAME=report.xml npx playwright test --reporter=junit,list || echo "Yeah, tests failed, Windows is like this"
always:
videos_artifacts:
path: ./tests/videos/**
junit_artifacts:
path: report.xml
type: text/xml
format: junit
# windows_task:
# alias: windows
# timeout_in: 90m
# windows_container:
# image: cirrusci/windowsservercore:visualstudio2022-2022.06.23
# env:
# CIRRUS_SHELL: bash
# PATH: C:\Python310\Scripts\;C:\Python310\;%PATH%;C:\Program Files\nodejs\;C:\Program Files\Git\cmd;C:\Users\User\AppData\Local\Microsoft\WindowsApps;C:\Users\User\AppData\Roaming\npm;C:\Program Files\Microsoft Visual Studio\2022\Community\Msbuild\Current\Bin\
# prepare_script:
# - choco install nodejs --version=16.16.0 -y
# - choco install python --version=3.10.3 -y
# - choco install git visualstudio2019-workload-vctools -y
# - git submodule init
# - git submodule update
# - npm config set python 'C:\Python310\python.exe'
# install_script:
# - npx yarn install --ignore-engines
# || rm -R node_modules && npx yarn install --ignore-engines
# || rm -R node_modules && npx yarn install --ignore-engines
# build_script:
# - npx yarn build:apm
# - npx yarn build || npx yarn build || npx yarn build
# build_binary_script:
# - sed -i -e "s/[0-9]*-dev/`date -u +%Y%m%d%H`/g" package.json
# - npx yarn dist || npx yarn dist || npx yarn dist
# rename_binary_script:
# - node script/rename.js "Windows"
# binary_artifacts:
# path: .\binaries\*
# test_script:
# - mkdir extracted; tar -xf binaries/*zip -C ./extracted/
# - export BINARY_NAME=./extracted/Pulsar.exe
# - PLAYWRIGHT_JUNIT_OUTPUT_NAME=report.xml npx playwright test --reporter=junit,list || echo "Yeah, tests failed, Windows is like this"
# always:
# videos_artifacts:
# path: ./tests/videos/**
# junit_artifacts:
# path: report.xml
# type: text/xml
# format: junit

40
.github/renovate.json vendored
View File

@ -5,6 +5,13 @@
},
"labels": ["dependencies"],
"separateMajorMinor": "false",
"ignorePaths": [
"packages/autocomplete-atom-api/spec/fixtures/",
"packages/dev-live-reload/spec/fixtures/",
"packages/incompatible-packages/spec/fixtures/",
"packages/settings-view/spec/fixtures/",
"spec/fixtures/"
],
"packageRules": [
{
"description": "Group all DevDependencies for the Core Editor",
@ -38,6 +45,9 @@
{
"matchPaths": [ "packages/archive-view/**" ], "groupName": "archive-view package"
},
{
"matchPaths": [ "packages/autocomplete-atom-api/**" ], "groupName": "autocomplete-atom-api"
},
{
"matchPaths": [ "packages/autocomplete-css/**" ], "groupName": "autocomplete-css package"
},
@ -53,6 +63,9 @@
{
"matchPaths": [ "packages/autoflow/**" ], "groupName": "autoflow package"
},
{
"matchPaths": [ "packages/autosave/**" ], "groupName": "autosave package"
},
{
"matchPaths": [ "packages/background-tips/**" ], "groupName": "background-tips package"
},
@ -80,6 +93,12 @@
{
"matchPaths": [ "packages/exception-reporting/**" ], "groupName": "exception-reporting package"
},
{
"matchPaths": [ "packages/find-and-replace/**" ], "groupName": "find-and-replace package"
},
{
"matchPaths": [ "packages/fuzzy-finder/**" ], "groupName": "fuzzy-finder package"
},
{
"matchPaths": [ "packages/git-diff/**" ], "groupName": "git-diff package"
},
@ -95,6 +114,9 @@
{
"matchPaths": [ "packages/incompatible-packages/**" ], "groupName": "incompatible-packages package"
},
{
"matchPaths": [ "packages/keybinding-resolver/**" ], "groupName": "keybinding-resolver package"
},
{
"matchPaths": [ "packages/line-ending-selector/**" ], "groupName": "line-ending-selector package"
},
@ -104,24 +126,42 @@
{
"matchPaths": [ "packages/markdown-preview/**" ], "groupName": "markdown-preview package"
},
{
"matchPaths": [ "packages/notifications/**" ], "groupName": "notifications package"
},
{
"matchPaths": [ "packages/open-on-github/**" ], "groupName": "open-on-github package"
},
{
"matchPaths": [ "packages/package-generator/**" ], "groupName": "package-generator package"
},
{
"matchPaths": [ "packages/pulsar-updater/**" ], "groupName": "pulsar-updater package"
},
{
"matchPaths": [ "packages/settings-view/**" ], "groupName": "settings-view package"
},
{
"matchPaths": [ "packages/spell-check/**" ], "groupName": "spell-check package"
},
{
"matchPaths": [ "packages/status-bar/**" ], "groupName": "status-bar package"
},
{
"matchPaths": [ "packages/styleguide/**" ], "groupName": "styleguide package"
},
{
"matchPaths": [ "packages/symbols-view/**" ], "groupName": "symbols-view package"
},
{
"matchPaths": [ "packages/tabs/**" ], "groupName": "tabs package"
},
{
"matchPaths": [ "packages/timecop/**" ], "groupName": "timecop package"
},
{
"matchPaths": [ "packages/tree-view/**" ], "groupName": "tree-view package"
},
{
"matchPaths": [ "packages/update-package-dependencies/**" ], "groupName": "update-package-dependencies package"
},

145
.github/workflows/build.yml vendored Normal file
View File

@ -0,0 +1,145 @@
name: Build Pulsar Binaries
on:
push:
branches:
- 'master'
pull_request:
workflow_dispatch:
env:
# Variables needed for build information
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PYTHON_VERSION: '3.10'
NODE_VERSION: 16
ROLLING_UPLOAD_TOKEN: ${{ secrets.ROLLING_RELEASE_UPLOAD_TOKEN }}
# Below variables allow us to quickly control visual tests for each platform
RUN_WINDOWS_VT: false
RUN_LINUX_VT: true
RUN_MACOS_VT: true
jobs:
build:
strategy:
matrix:
os: [ ubuntu-latest, windows-latest, macos-latest ]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
- name: Checkout the latest code
uses: actions/checkout@v3
- name: Setup Node
uses: actions/setup-node@v3
with:
node-version: ${{ env.NODE_VERSION }}
- name: Setup Python
uses: actions/setup-python@v4
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Setup Git Submodule
run: |
git submodule init
git submodule update
- name: Check Pulsar Version
if: ${{ runner.os != 'Windows' }}
run: sed -i -e "s/[0-9]*-dev/`date -u +%Y%m%d%H`/g" package.json
- name: Check Pulsar Version - Windows
if: ${{ runner.os == 'Windows' }}
run: (Get-Content package.json) -replace '[0-9]*-dev', (date -u +%Y%m%d%H) | Set-Content -Path package.json
- name: Install Pulsar Dependencies
uses: nick-fields/retry@943e742917ac94714d2f408a0e8320f2d1fcafcd
with:
timeout_minutes: 30
max_attempts: 3
retry_on: error
command: yarn install --ignore-engines
on_retry_command: rm -R node_modules
- name: Build Pulsar
uses: nick-fields/retry@943e742917ac94714d2f408a0e8320f2d1fcafcd
with:
timeout_minutes: 30
max_attempts: 3
retry_on: error
command: |
yarn build
yarn run build:apm
# macOS Signing Stuff
- name: Build Pulsar Binaries (macOS)
if: ${{ runner.os == 'macOS' }}
env:
CSC_LINK: ${{ secrets.CSC_LINK }}
CSC_KEY_PASSWORD: ${{ secrets.CSC_KEY_PASSWORD }}
APPLEID: ${{ secrets.APPLEID }}
APPLEID_PASSWORD: ${{ secrets.APPLEID_PASSWORD }}
TEAM_ID: ${{ secrets.TEAM_ID }}
uses: nick-fields/retry@943e742917ac94714d2f408a0e8320f2d1fcafcd
with:
timeout_minutes: 30
max_attempts: 3
retry_on: error
command: yarn dist
- name: Build Pulsar Binaries
if: ${{ runner.os != 'macOS' }}
uses: nick-fields/retry@943e742917ac94714d2f408a0e8320f2d1fcafcd
with:
timeout_minutes: 30
max_attempts: 3
retry_on: error
command: yarn dist
- name: Upload Binary Artifacts
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.os }} Binaries
path: ./binaries/*
- name: Test Binary - Linux
if: ${{ (runner.os == 'Linux') && env.RUN_LINUX_VT }}
run: |
rm -R node_modules/electron; yarn install --check-files
./binaries/*AppImage --appimage-extract
export BINARY_NAME='squashfs-root/pulsar'
mkdir -p ./tests/videos
Xvfb -screen 0 1024x768x24+32 :99 & nohup ffmpeg -video_size 1024x768 -f x11grab -i :99.0 ./tests/videos/out.mpg & DISPLAY=:99 PLAYWRIGHT_JUNIT_OUTPUT_NAME=report.xml npx playwright test --reporter=junit,list
- name: Test Binary - Windows
if: runner.os == 'Windows' && env.RUN_WINDOWS_VT == true
# TODO: Convert script to PowerShell
run: |
mkdir extracted; tar -xf binaries/*zip -C ./extracted/
export BINARY_NAME=./extracted/Pulsar.exe
PLAYWRIGHT_JUNIT_OUTPUT_NAME=report.xml npx playwright test --reporter=junit,list || echo "Yeah, tests failed, Windows is like this"
- name: Test Binary - macOS
if: runner.os == 'macOS' && env.RUN_MACOS_VT == true
run: |
export PATH="/usr/local/opt/node@16/bin:/usr/local/bin:$PATH"
rm -R node_modules/electron; yarn install --check-files
hdiutil mount binaries/Pulsar*dmg
export BINARY_NAME=`ls /Volumes/Pulsar*/Pulsar.app/Contents/MacOS/Pulsar`
PLAYWRIGHT_JUNIT_OUTPUT_NAME=report.xml arch -x86_64 npx playwright test --reporter=junit,list
- name: Add binaries to Rolling Release Repo
if: ${{ github.event_name == 'push' }}
# We only want to upload rolling binaries if they are a commit to master
# Otherwise we want to not upload if it's a PR or manually triggered build
run: |
cd ./script/rolling-release-scripts
npm install
node ./rolling-release-binary-upload.js
- name: Upload Video Artifacts
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.os }} Videos
path: ./tests/videos/**

View File

@ -15,10 +15,10 @@ jobs:
runs-on: ubuntu-20.04
steps:
- name: Checkout the latest code
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Setup node
uses: actions/setup-node@v2-beta
uses: actions/setup-node@v3
with:
node-version: 16
@ -155,7 +155,7 @@ jobs:
steps:
- name: Checkout the latest code
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Setup NodeJS
uses: actions/setup-node@v3
with:

View File

@ -6,11 +6,39 @@
## [Unreleased]
## 1.108.0
- Restored ability for `less` files in packages to use inline JavaScript inside backticks.
- Fixed a syntax highlighting issue inside the `styleguide` package.
- Fixed an issue with rubygems timing out on ARM Linux workflow.
- Rewrote Tree-sitter scope predicates to use `#is?` and `#is-not?` where applicable.
- Ensure that project-specific setting overrides don't leak to the user's config file when the settings UI is visited.
- Added a feature in `markdown-preview` that adds support for Linguist, Chroma, Rouge, and HighlightJS for language identifiers in fenced code blocks.
- Fixed the `TextMate` `language-toml` grammar to properly support whitespace where-ever it may appear.
- Added a Tree-Sitter grammar for YAML files.
- Added a new core package `pulsar-updater` to help users update Pulsar.
- Added `ppm` and `ppm.cmd` binaries/launchers within ppm. This allows easier integration of correctly named binaries on more systems in more contexts (especially Windows). Existing `apm` and `apm.cmd` binaries/launchers are still there for the time being.
- Added a modern Tree-Sitter grammar for Markdown files.
### Pulsar
Bumped: ppm: Update submodule to 49c8ced8f9552bb4aeb279130 [@DeeDeeG](https://github.com/pulsar-edit/pulsar/pull/654)
- Added: Add the Tree-Sitter Markdown grammar [@savetheclocktower](https://github.com/pulsar-edit/pulsar/pull/659)
- Fixed: [pulsar-updater] Correct deb-get instructions ( + readme change) [@Daeraxa](https://github.com/pulsar-edit/pulsar/pull/669)
- Added: Tree-sitter running fixes [@savetheclocktower](https://github.com/pulsar-edit/pulsar/pull/660)
- Added: Add `pulsar-updater` as a core bundled Package [@confused-Techie](https://github.com/pulsar-edit/pulsar/pull/656)
- Added: Manual Decaf Bundle (`autocomplete-atom-api`, `autoflow`, `deprecation-cop`) Source [@confused-Techie](https://github.com/pulsar-edit/pulsar/pull/664)
- Bumped: [Time Sensitive] Update Cirrus Encrypted token for GitHub Access [@confused-Techie](https://github.com/pulsar-edit/pulsar/pull/666)
- Added: [core]: Transforming Deprecated Math Usage - Support for Variables [@confused-Techie](https://github.com/pulsar-edit/pulsar/pull/653)
- Added: Add Tree-sitter grammar for YAML [@savetheclocktower](https://github.com/pulsar-edit/pulsar/pull/634)
- Fixed: [language-toml] Add whitespace rule to values [@arite](https://github.com/pulsar-edit/pulsar/pull/646)
- Added: [markdown-preview]: Support for nested table objects in Yaml Frontmatter [@confused-Techie](https://github.com/pulsar-edit/pulsar/pull/629)
- Added: [markdown-preview]: Revamp Fenced Code Block Language Identifiers [@confused-Techie](https://github.com/pulsar-edit/pulsar/pull/622)
- Bumped: ppm: Update submodule to 49c8ced8f9552bb4aeb279130 [@DeeDeeG](https://github.com/pulsar-edit/pulsar/pull/654)
- Fixed: [settings-view] Don't let project-specific settings pollute the UI [@savetheclocktower](https://github.com/pulsar-edit/pulsar/pull/655)
- Added: [modern-tree-sitter] Overhaul Tree-sitter scope tests [@savetheclocktower](https://github.com/pulsar-edit/pulsar/pull/652)
- Fixed: fix(arm): use rubygems from APT [@cat-master21](https://github.com/pulsar-edit/pulsar/pull/651)
- Added: [language-*]: Manual Spec Decaf (Part 1) [@confused-Techie](https://github.com/pulsar-edit/pulsar/pull/632)
- Fixed: [styleguide] Fix error when styleguide is shown... [@savetheclocktower](https://github.com/pulsar-edit/pulsar/pull/648)
- Bumped: Bump `less-cache` to 2.0.1 [@savetheclocktower](https://github.com/pulsar-edit/pulsar/pull/644)
### ppm

View File

@ -1,14 +1,18 @@
# Atom Docs
# Pulsar Docs
![Atom](https://cloud.githubusercontent.com/assets/72919/2874231/3af1db48-d3dd-11e3-98dc-6066f8bc766f.png)
Most of the Atom user and developer documentation is contained in the [Atom Flight Manual](https://github.com/atom/flight-manual.atom.io).
Most of the Pulsar/Atom user and developer documentation is contained on the [Pulsar Website](https://pulsar-edit.dev/docs/launch-manual/).
While the Pulsar website does not yet have the Pulsar API documentation, this is partially available within [Pulsar API Documentation](./Pulsar-API-Documentation.md) or otherwise the original docs are available from community members [here](https://atom-flight-manual-archive.github.io/).
There is also general guidance on the internal [structure and behavior](./architecture/README.md) of Pulsar available.
## Build documentation
Instructions for building Atom on various platforms from source.
* Moved to [the Flight Manual](https://flight-manual.atom.io/hacking-atom/sections/hacking-on-atom-core/)
* Moved to [the Flight Manual](https://pulsar-edit.dev/docs/launch-manual/sections/core-hacking/)
* Linux
* macOS
* Windows
@ -16,5 +20,3 @@ Instructions for building Atom on various platforms from source.
## Other documentation
[Native Profiling on macOS](./native-profiling.md)
The other documentation that was listed here previously has been moved to [the Flight Manual](https://flight-manual.atom.io).

View File

@ -0,0 +1,10 @@
# Pulsar Architecture
This directory contains a collection of files and diagrams, that aim to easily explain some of the core components or behaviors of Pulsar.
Remember that keeping these charts up to date is best effort, and the age of these files should be taken into consideration.
## Contents
- [Startup Overview](./overview.md)
- ['The World'](./the-world.md)
- [Package Preload](./package-preload.md)

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 30 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 30 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 18 KiB

View File

@ -0,0 +1,169 @@
# Pulsar's High Level Startup Overview
It's no secret that Pulsar, having inherited its codebase from Atom, is a big and complex application,
with many discrete, moving aspects that not all developers have a concrete grasp on.
The goal of this document is to make the architecture of Pulsar, as well as its logical flow,
more understandable and approachable.
This will be accomplished through a series of illustrations detailing the functional decomposition and detailed logical flow of Pulsar and its parts, as well as lists of what's accomplished in each part.
This document is aimed at those roughly familiar with the large scale goals and features of Pulsar, as well as those with a basic understanding of the package model used to power much of Pulsar's functionality.
![Pulsar Overview MermaidJS Image](./assets/pulsar-overview.svg "Pulsar Overview")
<details>
<summary>
MermaidJS to create image above
</summary>
```
flowchart TD
id1("`Initialization
*./src/main-process/main.js*`") --> id2("`Startup
*./src/main-process/start.js*`")
id2 --> id3("`Main Process Tests`")
id2 --> id4("`Application Startup
*./src/main-process/atom-application.js*`")
id2 --> id5("`Startup w/ Squirrel
*./src/main-process/squirrel-update.js*`")
id4 --> id6("`Test Runner
*./src/initialize-test-window.js*`")
id4 --> id7("`Initialize Application Window
*./src/initialize-application-window.js*`")
id7 --> id9("`'The World'
*./src/atom-environment.js*`")
id7 --> id10("`ApplicationDelegate
*./src/application-delegate.js*`")
id7 --> id8("`Clipboard
*./src/clipboard.js*`")
id8 --> id9
id10 --> id9
```
</details>
---
To further outline what occurs in the steps above:
## Initialization
Startup of Pulsar occurs within `./src/main-process/main.js`.
Which Determines:
- `resourcePath`
- `devResourcePath`
- `stableResourcePath`
- `defaultRepositoryPath`
Which Sets:
- Application Start Time
Which Does:
- Initializes Startup of `./src/main-process/start.js`
## Startup
The more general startup handling of Pulsar occurs within `./src/main-process/start.js`.
Which Sets:
- Shell Start Time
- `app.allowRendererProcessReuse`: `false`
- `app.commandLine.appendSwitch('enable-experimental-web-platform-features')`
- `app.commandLine.appendSwitch('force-color-profile', config.get('core.colorProfile'))`
- `app.setAppUserModelId()`
- `app.on('open-file', $)`
- `app.on('open-url', $)`
Which Does:
- Normalizes the `resourcePath` and `devResourcePath`
- Uses `Config` to locate and read the config file
- `atomPaths.setAtomHome()`
- `atomPaths.setUserData()`
- May defer to `./src/main-process/squirrel-update.js` to startup if on Windows
- May defer to `./spec/main-process/mocha-test-runner.js` to startup main process tests
- May call `.open()` on `./src/main-process/atom-application.js`
## Application Startup
The proper startup of the Pulsar Application occurs within `./src/main-process/atom-application.js`.
Which Sets:
- `APPLICATION_STATE_VERSION`
- Global `atomApplication`
Which Does:
- Does setup of the application socket
- Handles deprecated benchmark startup
- Ensures to return a new instance of `AtomApplication`
- Registers basic application commands
- Initializes:
* `ApplicationMenu`
* `AtomProtocolHandler`
* `WindowStack`
* `FileRecoveryService`
* `Config`
* `StorageFolder`
* `AutoUpdateManager`
- May startup the package test runner
- May quit if asked to startup in benchmark mode
- May open previously opened files/folders
- May open new instance of Pulsar
## Initialize Application Window
Begins initialization of an individual Pulsar window, occurs within `./src/initialize-application-window.js`.
Which Determines:
Which Sets:
- Sets the `global.atom` to a new instance of `AtomEnvironment`
Which Does:
- triggers `.preloadPackages()`
- Initializes:
* Clipboard
* AtomEnvironment
* ApplicationDelegate
## 'The World'
'The World' refers to being within the Pulsar application, most of the application occurs within here.
This code lives within `./src/atom-environment.js`.
An important note about being initialized within 'The World': there is no access to the `atom`
global until the initial constructor completes processing, meaning great care must be taken
to check whether `atom` is available within the initialized modules.
Which Sets:
- `AtomEnvironment.version`: `1` | Possibly a reference to `APPLICATION_STATE_VERSION`?
- `AtomEnvironment.saveStateDebounceInterval`: `1000`
Which Does:
- Initializes:
* Clipboard | Inherited from 'Initialize Application Window'
* ApplicationDelegate | Inherited from 'Initialize Application Window'
* DeserializerManager
* ViewRegistry
* NotificationManager
* StateStore
* Config
* KeymapManager
* TooltipManager
* CommandRegistry
* URIHandlerRegistry
* GrammarRegistry
* StyleManager
* PackageManager
* ThemeManager
* MenuManager
* ContextMenuManager
* Project
* CommandInstaller
* ProtocolHandlerInstaller
* TextEditorRegistry
* Workspace
* AutoUpdateManager
* WindowEventHandler
* HistoryManager

View File

@ -0,0 +1,66 @@
# Package Preload
Pulsar's packages are preloaded, very early on within the startup cycle of Pulsar.
As it's called immediately after the `atom` global is initialized, it's important to understand what steps occur during preloading, and what packages are affected.
![Package Preload Overview](./assets/package-preload.svg "Package Preload Overview")
---
<details>
<summary>
MermaidJS to create image above
</summary>
```
flowchart TD
iaw["`
initialize-application-window.js
Called right after global 'atom' is set
`"] -->
ae["`
AtomEnvironment
.preloadPackages()
`"] -->
pl1["preloadPackages()"] -->|if in packageCache| sg1
subgraph sg1
direction LR
pl2["preloadPackage()"]
pl2 -->|"call .preload()"| tp1["new ThemePackage"]
pl2 -->|"call .preload()"| p1["new Package"]
p1 --> p1Pre["`
this.preload() call:
Does more than advertised here
`"]
p1Pre --> lk1[".loadKeymaps()"]
lk1 --> pcRead1["`Read from packagesCache:
If bundled package && in packagesCache`"]
lk1 --> fileRead1["`Read from file:
If !bundled || !in packagesCache`"]
p1Pre --> lm1[".loadMenus()"]
lm1 --> pcRead1
lm1 --> fileRead1
p1Pre --> acss[".activateCoreStartupServices()"]
p1Pre --> rmm[".requireMainModule()"]
acss --> rmm
p1Pre --> ls1[".loadSettings()"]
ls1 --> pcRead1
ls1 --> fileRead1
p1Pre --> ak1[".activateKeymaps()"]
p1Pre --> am1[".activateMenus()"]
tp1 --> tp1Pre[".preload()"]
  tp1Pre --> csrol1[".registerConfigSchemaFromMetadata()"]
end
```
</details>

View File

@ -0,0 +1,101 @@
# 'The World'
While it's difficult to convey the full scope of how Pulsar works internally, just like the previous page [`overview.md`](./overview.md) detailed the general gist of how Pulsar starts up, this document provides a quick reference to how all the internal parts of Pulsar are connected.
This document is not at all comprehensive, and must be continually updated. Additionally, this image does not track outside dependency usage, nor dependence on every single internal module. It focuses mostly on modules that are either required during initialization, or are referenced during the constructor of their respective class.
<details>
<summary>
Details on the creation of this image
</summary>
This image has been created with Plant UML. A Live editor is available [here](https://www.plantuml.com/plantuml/uml).
The code used to create this image:
```uml
@startwbs
* Initialization
** Startup
***< Main Process Tests
*** Startup w/ Squirrel
***< Application Startup
****< Test Runner
**** Initialize Application Window
***** 'The World'
******< Config
******* ScopeDescriptor
****** KeymapManager
****** TooltipManager
******* Tooltip
******< CommandRegistry
******< URIHandlerRegistry
****** StyleManager
******* createStylesElement
******* DEPRECATED_SYNTAX_SELECTORS
******< MenuManager
******* MenuHelpers
****** ContextMenuManager
******* MenuHelpers
******* sortMenuItems
******< TextEditorRegistry
******* TextEditor
******* ScopeDescriptor
****** HistoryManager
******< DeserializerManager
******< ViewRegistry
****** NotificationManager
******* Notification
****** StateStore
******< PackageManager
******* Package
******* ThemePackage
******* ModuleCache
****** ThemeManager
******* LessCompileCache
****** Project
******* watchPath
******* DefaultDirectoryProvider
******* Model
******* GitRepositoryProvider
******< CommandInstaller
******< ProtocolHandlerInstaller
******< AutoUpdateManager
******< WindowEventHandler
******* listen
******< GrammarRegistry
*******< NodeTreeSitterLanguageMode
******** TreeIndenter
********< TextMateLanguageMode
******** TokenizedLine
********< ScopeDescriptor
******** matcherForSelector
********< Token
******* WASMTreeSitterLanguageMode
******** Parser
******* TextMateLanguageMode
******** TokenizedLine
********< TokenIterator
******** ScopeDescriptor
********< NullGrammar
*******< ScopeDescriptor
******* Token
****** Workspace
******* DefaultDirectorySearcher
*******< RipgrepDirectorySearcher
******* WorkspaceCenter
*******< createWorkspaceElement
******* PanelContainer
*******< StateStore
******* TextEditor
*******< Panel
******* Task
*******< Dock
@endwbs
```
</details>
---
![Pulsar 'How Everything Connects' UML Image](./assets/how-everything-connects.svg "Pulsar 'How Everything Connects'")

View File

@ -0,0 +1,81 @@
---
status: accepted
date: 2023-08-10
deciders: '@confused-Techie, @Daeraxa, @savetheclocktower, @mauricioszabo'
---
# Remove original AutoUpdate Functionality
## Context and Problem Statement
To provide users with some form of autoupdate functionality, many thought we could
adopt the original method used by the Atom team, and de-facto method within Electron
applications, Squirrel. While Squirrel is great for AutoUpdating, it has some big
issues, mainly it requires that every platform has signed binaries, which can be
prohibitively expensive. Additionally, it only supports Windows and macOS.
## Decision Drivers
* Users need a way to fulfill AutoUpdate functionality
## Considered Options
* Sign Windows and macOS (Already do) binaries, and setup Squirrel.
* Remove AutoUpdate totally.
* Use core package to assist in update installation.
## Decision Outcome
Chosen option: "Use core package to assist in update installation", to allow similar
behavior, we opted to create a core package that could help alert users of updates
to Pulsar being available, while technically not actually performing any installation.
This new core package was added to Pulsar in [`pulsar-edit/pulsar#656`](https://github.com/pulsar-edit/pulsar/pull/656).
This package alerts and assists in users installing new updates to Pulsar, while no
longer relying on any form of AutoUpdate functionality from Squirrel.
This means, that we can now remove all Squirrel and AutoUpdate functionality that's
built right into Pulsar, hopefully cutting down on startup time.
<!-- This is an optional element. Feel free to remove. -->
### Consequences
* Good, because this allows a semblance of AutoUpdate functionality without ever having to hit our own backend.
* Good, because it allows users to be more in control, and automatically notified of new versions.
* Good, because it sections off the logic to a package that can be disabled or replaced as needed.
* Bad, because it does not actually perform any autoupdates.
## Pros and Cons of the Options
### Sign Windows and macOS binaries, and setup Squirrel
This would return things to the status quo, of Atom AutoUpdate functionality.
* Good, because users would know what to expect.
* Good, because it would provide AutoUpdates to Windows and macOS users.
* Bad, because it would be prohibitively expensive, and would fail to work if we ever
didn't have the funds for expensive binary signing costs.
* Bad, because this would leave Linux users with still zero support for AutoUpdates.
* Bad, because it would add additional complexity into our CI for signing Windows binaries.
### Remove AutoUpdates totally
This is essentially what we have been doing since we took over Atom: providing zero methodology
for users to perform autoupdates within Pulsar.
* Good, because it requires zero effort on our end.
* Good, because there are no signing costs.
* Bad, because it provides users no method to easily update.
* Bad, because users would be misinformed about Atom's ability to autoupdate being lost.
* Bad, because autoupdates of some kind are an expected and standard feature in any modern application.
## More Information
This decision is not one taken lightly, and could still stir some controversy on best implementations.
There was also additional concerns about not deleting the code used for Squirrel's AutoUpdate functionality
in case we ever did want to return to that behavior, since it already works perfectly, if properly setup.
For that reason, instead of keeping the code within the repo, below are details about where the code that comprises
the Squirrel AutoUpdate logic will be kept within Git, so that it can always be retrieved if ever needed.
* Last Commit Before Removal: `bf60fbe6fc267b737a70d5d39c03cad1629ea128`
* PR Where it was Removed: [`pulsar-edit/pulsar#668`](https://github.com/pulsar-edit/pulsar/pull/668)

View File

@ -2,7 +2,7 @@
"name": "pulsar",
"author": "Pulsar-Edit <admin@pulsar-edit.dev>",
"productName": "Pulsar",
"version": "1.107.1-dev",
"version": "1.108.0-dev",
"description": "A Community-led Hyper-Hackable Text Editor",
"branding": {
"id": "pulsar",

View File

@ -31,8 +31,8 @@ See [RFC 003](https://github.com/atom/atom/blob/master/docs/rfcs/003-consolidate
| **dev-live-reload** | [`./dev-live-reload`](./dev-live-reload) | |
| **encoding-selector** | [`./encoding-selector`](./encoding-selector) | |
| **exception-reporting** | [`./exception-reporting`](./exception-reporting) | |
| **find-and-replace** | [`./find-and-replace`][find-and-replace] | |
| **fuzzy-finder** | [`./fuzzy-finder`][fuzzy-finder] | |
| **find-and-replace** | [`./find-and-replace`](./find-and-replace) | |
| **fuzzy-finder** | [`./fuzzy-finder`](./fuzzy-finder) | |
| **github** | [`pulsar-edit/github`][github] | |
| **git-diff** | [`./git-diff`](./git-diff) | |
| **go-to-line** | [`./go-to-line`](./go-to-line) | |

View File

@ -36,7 +36,6 @@ module.exports = class About {
if (this.views.aboutView) this.views.aboutView.destroy();
this.views.aboutView = null;
if (this.state.updateManager) this.state.updateManager.dispose();
this.setState({ updateManager: null });
this.subscriptions.dispose();
@ -69,8 +68,7 @@ module.exports = class About {
currentAtomVersion: this.state.currentAtomVersion,
currentElectronVersion: this.state.currentElectronVersion,
currentChromeVersion: this.state.currentChromeVersion,
currentNodeVersion: this.state.currentNodeVersion,
availableVersion: this.state.updateManager.getAvailableVersion()
currentNodeVersion: this.state.currentNodeVersion
});
this.handleStateChanges();
}
@ -86,14 +84,9 @@ module.exports = class About {
currentAtomVersion: this.state.currentAtomVersion,
currentElectronVersion: this.state.currentElectronVersion,
currentChromeVersion: this.state.currentChromeVersion,
currentNodeVersion: this.state.currentNodeVersion,
availableVersion: this.state.updateManager.getAvailableVersion()
currentNodeVersion: this.state.currentNodeVersion
});
}
});
this.state.updateManager.onDidChange(() => {
this.didChange();
});
}
};

View File

@ -1,38 +0,0 @@
const { CompositeDisposable } = require('atom');
const etch = require('etch');
const EtchComponent = require('../etch-component');
const $ = etch.dom;
module.exports = class AboutStatusBar extends EtchComponent {
constructor() {
super();
this.subscriptions = new CompositeDisposable();
this.subscriptions.add(
atom.tooltips.add(this.element, {
title:
'An update will be installed the next time Pulsar is relaunched.<br/><br/>Click the squirrel icon for more information.'
})
);
}
handleClick() {
atom.workspace.open('atom://about');
}
render() {
return $.div(
{
className: 'about-release-notes inline-block',
onclick: this.handleClick.bind(this)
},
$.span({ type: 'button', className: 'icon icon-squirrel' })
);
}
destroy() {
super.destroy();
this.subscriptions.dispose();
}
};

View File

@ -3,7 +3,6 @@ const etch = require('etch');
const { shell } = require('electron');
const AtomLogo = require('./atom-logo');
const EtchComponent = require('../etch-component');
const UpdateView = require('./update-view');
const $ = etch.dom;
@ -31,7 +30,7 @@ module.exports = class AboutView extends EtchComponent {
handleReleaseNotesClick(e) {
e.preventDefault();
shell.openExternal(
this.props.updateManager.getReleaseNotesURLForAvailableVersion() //update-manager.js will need updating when we decide how to do the changelog
this.props.updateManager.getReleaseNotesURLForCurrentVersion()
);
}
@ -52,7 +51,15 @@ module.exports = class AboutView extends EtchComponent {
handleHowToUpdateClick(e) {
e.preventDefault();
shell.openExternal(
'https://pulsar-edit.dev/docs/launch-manual/sections/getting-started/#installing-pulsar'
'https://github.com/pulsar-edit/pulsar/tree/master/packages/pulsar-updater#readme'
);
}
executeUpdateAction(e) {
e.preventDefault();
atom.commands.dispatch(
atom.views.getView(atom.workspace),
'pulsar-updater:check-for-update'
);
}
@ -160,12 +167,31 @@ module.exports = class AboutView extends EtchComponent {
)
),
$(UpdateView, {
updateManager: this.props.updateManager,
availableVersion: this.props.availableVersion,
viewUpdateReleaseNotes: this.handleReleaseNotesClick.bind(this),
viewUpdateInstructions: this.handleHowToUpdateClick.bind(this)
}),
$.div(
{ className: 'about-updates group-start' },
$.div(
{ className: 'about-updates-box' },
$.div(
{ className: 'about-updates-status' },
$.div(
{ className: 'about-updates-item app-unsupported' },
$.span(
{ className: 'about-updates-label is-strong' },
'Updates have been moved to the package ', $.code({}, 'pulsar-updater'), '.',
$.br()
),
$.a(
{
className: 'about-updates-instructions',
onclick: this.handleHowToUpdateClick.bind(this)
},
'How to update'
)
)
),
this.renderUpdateChecker()
)
),
$.div(
{ className: 'about-actions group-item' },
@ -201,6 +227,29 @@ module.exports = class AboutView extends EtchComponent {
);
}
renderUpdateChecker() {
if (atom.packages.isPackageDisabled("pulsar-updater")) {
return $.div(
{ className: 'about-updates-item app-unsupported' },
$.span(
{ className: 'about-updates-label is-strong' },
'Enable `pulsar-updater` to check for updates'
)
);
} else {
return $.button(
{
className: 'btn about-update-action-button',
onclick: this.executeUpdateAction.bind(this),
style: {
display: 'block'
}
},
'Check Now'
);
}
}
serialize() {
return {
deserializer: this.constructor.name,

View File

@ -1,181 +0,0 @@
const etch = require('etch');
const EtchComponent = require('../etch-component');
const UpdateManager = require('../update-manager');
const $ = etch.dom;
module.exports = class UpdateView extends EtchComponent {
constructor(props) {
super(props);
if (
this.props.updateManager.getAutoUpdatesEnabled() &&
this.props.updateManager.getState() === UpdateManager.State.Idle
) {
this.props.updateManager.checkForUpdate();
}
}
handleAutoUpdateCheckbox(e) {
atom.config.set('core.automaticallyUpdate', e.target.checked);
}
shouldUpdateActionButtonBeDisabled() {
let { state } = this.props.updateManager;
return (
state === UpdateManager.State.CheckingForUpdate ||
state === UpdateManager.State.DownloadingUpdate
);
}
executeUpdateAction() {
if (
this.props.updateManager.state ===
UpdateManager.State.UpdateAvailableToInstall
) {
this.props.updateManager.restartAndInstallUpdate();
} else {
this.props.updateManager.checkForUpdate();
}
}
renderUpdateStatus() {
let updateStatus = '';
switch (this.props.updateManager.state) {
case UpdateManager.State.Idle:
updateStatus = $.div(
{
className:
'about-updates-item is-shown about-default-update-message'
},
this.props.updateManager.getAutoUpdatesEnabled()
? 'Pulsar will check for updates automatically'
: 'Automatic updates are disabled please check manually'
);
break;
case UpdateManager.State.CheckingForUpdate:
updateStatus = $.div(
{ className: 'about-updates-item app-checking-for-updates' },
$.span(
{ className: 'about-updates-label icon icon-search' },
'Checking for updates...'
)
);
break;
case UpdateManager.State.DownloadingUpdate:
updateStatus = $.div(
{ className: 'about-updates-item app-downloading-update' },
$.span({ className: 'loading loading-spinner-tiny inline-block' }),
$.span({ className: 'about-updates-label' }, 'Downloading update')
);
break;
case UpdateManager.State.UpdateAvailableToInstall:
updateStatus = $.div(
{ className: 'about-updates-item app-update-available-to-install' },
$.span(
{ className: 'about-updates-label icon icon-squirrel' },
'New update'
),
$.span(
{ className: 'about-updates-version' },
this.props.availableVersion
),
$.a(
{
className: 'about-updates-release-notes',
onclick: this.props.viewUpdateReleaseNotes
},
'Release Notes'
)
);
break;
case UpdateManager.State.UpToDate:
updateStatus = $.div(
{ className: 'about-updates-item app-up-to-date' },
$.span({ className: 'icon icon-check' }),
$.span(
{ className: 'about-updates-label is-strong' },
'Pulsar is up to date!'
)
);
break;
case UpdateManager.State.Unsupported:
updateStatus = $.div(
{ className: 'about-updates-item app-unsupported' },
$.span(
{ className: 'about-updates-label is-strong' },
'Your system does not support automatic updates'
),
$.a(
{
className: 'about-updates-instructions',
onclick: this.props.viewUpdateInstructions
},
'How to update'
)
);
break;
case UpdateManager.State.Error:
updateStatus = $.div(
{ className: 'about-updates-item app-update-error' },
$.span({ className: 'icon icon-x' }),
$.span(
{ className: 'about-updates-label app-error-message is-strong' },
this.props.updateManager.getErrorMessage()
)
);
break;
}
return updateStatus;
}
render() {
return $.div(
{ className: 'about-updates group-start' },
$.div(
{ className: 'about-updates-box' },
$.div({ className: 'about-updates-status' }, this.renderUpdateStatus()),
$.button(
{
className: 'btn about-update-action-button',
disabled: this.shouldUpdateActionButtonBeDisabled(),
onclick: this.executeUpdateAction.bind(this),
style: {
display:
this.props.updateManager.state ===
UpdateManager.State.Unsupported
? 'none'
: 'block'
}
},
this.props.updateManager.state === 'update-available'
? 'Restart and install'
: 'Check now'
)
),
$.div(
{
className: 'about-auto-updates',
style: {
display:
this.props.updateManager.state === UpdateManager.State.Unsupported
? 'none'
: 'block'
}
},
$.label(
{},
$.input({
className: 'input-checkbox',
type: 'checkbox',
checked: this.props.updateManager.getAutoUpdatesEnabled(),
onchange: this.handleAutoUpdateCheckbox.bind(this)
}),
$.span({}, 'Automatically download updates')
)
)
);
}
};

View File

@ -1,69 +1,24 @@
const { CompositeDisposable } = require('atom');
const semver = require('semver');
const UpdateManager = require('./update-manager');
const About = require('./about');
const StatusBarView = require('./components/about-status-bar');
let updateManager;
// The local storage key for the available update version.
const AvailableUpdateVersion = 'about:version-available';
const AboutURI = 'atom://about';
module.exports = {
activate() {
this.subscriptions = new CompositeDisposable();
this.createModel();
let availableVersion = window.localStorage.getItem(AvailableUpdateVersion);
if (
atom.getReleaseChannel() === 'dev' ||
(availableVersion && semver.lte(availableVersion, atom.getVersion()))
) {
this.clearUpdateState();
}
this.subscriptions.add(
updateManager.onDidChange(() => {
if (
updateManager.getState() ===
UpdateManager.State.UpdateAvailableToInstall
) {
window.localStorage.setItem(
AvailableUpdateVersion,
updateManager.getAvailableVersion()
);
this.showStatusBarIfNeeded();
}
})
);
this.subscriptions.add(
atom.commands.add('atom-workspace', 'about:clear-update-state', () => {
this.clearUpdateState();
})
);
},
deactivate() {
this.model.destroy();
if (this.statusBarTile) this.statusBarTile.destroy();
if (updateManager) {
updateManager.dispose();
updateManager = undefined;
}
},
clearUpdateState() {
window.localStorage.removeItem(AvailableUpdateVersion);
},
consumeStatusBar(statusBar) {
this.statusBar = statusBar;
this.showStatusBarIfNeeded();
},
deserializeAboutView(state) {
if (!this.model) {
this.createModel();
@ -83,27 +38,6 @@ module.exports = {
currentNodeVersion: process.version,
updateManager: updateManager
});
},
isUpdateAvailable() {
let availableVersion = window.localStorage.getItem(AvailableUpdateVersion);
return availableVersion && semver.gt(availableVersion, atom.getVersion());
},
showStatusBarIfNeeded() {
if (this.isUpdateAvailable() && this.statusBar) {
let statusBarView = new StatusBarView();
if (this.statusBarTile) {
this.statusBarTile.destroy();
}
this.statusBarTile = this.statusBar.addRightTile({
item: statusBarView,
priority: -100
});
return this.statusBarTile;
}
}
};

View File

@ -1,127 +1,13 @@
const { Emitter, CompositeDisposable } = require('atom');
const Unsupported = 'unsupported';
const Idle = 'idle';
const CheckingForUpdate = 'checking';
const DownloadingUpdate = 'downloading';
const UpdateAvailableToInstall = 'update-available';
const UpToDate = 'no-update-available';
const ErrorState = 'error';
let UpdateManager = class UpdateManager {
constructor() {
this.emitter = new Emitter();
this.currentVersion = atom.getVersion();
this.availableVersion = atom.getVersion();
this.resetState();
this.listenForAtomEvents();
}
listenForAtomEvents() {
this.subscriptions = new CompositeDisposable();
this.subscriptions.add(
atom.autoUpdater.onDidBeginCheckingForUpdate(() => {
this.setState(CheckingForUpdate);
}),
atom.autoUpdater.onDidBeginDownloadingUpdate(() => {
this.setState(DownloadingUpdate);
}),
atom.autoUpdater.onDidCompleteDownloadingUpdate(({ releaseVersion }) => {
this.setAvailableVersion(releaseVersion);
}),
atom.autoUpdater.onUpdateNotAvailable(() => {
this.setState(UpToDate);
}),
atom.autoUpdater.onUpdateError(() => {
this.setState(ErrorState);
}),
atom.config.observe('core.automaticallyUpdate', value => {
this.autoUpdatesEnabled = value;
this.emitDidChange();
})
);
// TODO: When https://github.com/atom/electron/issues/4587 is closed we can add this support.
// atom.autoUpdater.onUpdateAvailable =>
// @find('.about-updates-item').removeClass('is-shown')
// @updateAvailable.addClass('is-shown')
}
dispose() {
this.subscriptions.dispose();
}
onDidChange(callback) {
return this.emitter.on('did-change', callback);
}
emitDidChange() {
this.emitter.emit('did-change');
}
getAutoUpdatesEnabled() {
return (
this.autoUpdatesEnabled && this.state !== UpdateManager.State.Unsupported
);
}
setAutoUpdatesEnabled(enabled) {
return atom.config.set('core.automaticallyUpdate', enabled);
}
getErrorMessage() {
return atom.autoUpdater.getErrorMessage();
}
getState() {
return this.state;
}
setState(state) {
this.state = state;
this.emitDidChange();
}
resetState() {
this.state = atom.autoUpdater.platformSupportsUpdates()
? atom.autoUpdater.getState()
: Unsupported;
this.emitDidChange();
}
getAvailableVersion() {
return this.availableVersion;
}
setAvailableVersion(version) {
this.availableVersion = version;
if (this.availableVersion !== this.currentVersion) {
this.state = UpdateAvailableToInstall;
} else {
this.state = UpToDate;
}
this.emitDidChange();
}
checkForUpdate() {
atom.autoUpdater.checkForUpdate();
}
restartAndInstallUpdate() {
atom.autoUpdater.restartAndInstallUpdate();
}
getReleaseNotesURLForCurrentVersion() {
return this.getReleaseNotesURLForVersion(this.currentVersion);
}
getReleaseNotesURLForAvailableVersion() {
return this.getReleaseNotesURLForVersion(this.availableVersion);
}
getReleaseNotesURLForVersion(appVersion) {
// Dev versions will not have a releases page
if (appVersion.indexOf('dev') > -1) {
@ -138,14 +24,4 @@ let UpdateManager = class UpdateManager {
}
};
UpdateManager.State = {
Unsupported: Unsupported,
Idle: Idle,
CheckingForUpdate: CheckingForUpdate,
DownloadingUpdate: DownloadingUpdate,
UpdateAvailableToInstall: UpdateAvailableToInstall,
UpToDate: UpToDate,
Error: ErrorState
};
module.exports = UpdateManager;

View File

@ -9,8 +9,7 @@
"version": "1.9.1",
"license": "MIT",
"dependencies": {
"etch": "^0.14.1",
"semver": "^7.5.2"
"etch": "^0.14.1"
},
"engines": {
"atom": ">=1.7 <2.0.0"
@ -20,36 +19,6 @@
"version": "0.14.1",
"resolved": "https://registry.npmjs.org/etch/-/etch-0.14.1.tgz",
"integrity": "sha512-+IwqSDBhaQFMUHJu4L/ir0dhDoW5IIihg4Z9lzsIxxne8V0PlSg0gnk2STaKWjGJQnDR4cxpA+a/dORX9kycTA=="
},
"node_modules/lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/semver": {
"version": "7.5.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.2.tgz",
"integrity": "sha512-SoftuTROv/cRjCze/scjGyiDtcUyxw1rgYQSZY7XTmtR5hX+dm76iDbTH8TkLPHCQmlbQVSSbNZCPM2hb0knnQ==",
"dependencies": {
"lru-cache": "^6.0.0"
},
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
}
}
}

View File

@ -10,15 +10,7 @@
"atom": ">=1.7 <2.0.0"
},
"dependencies": {
"etch": "^0.14.1",
"semver": "^7.3.8"
},
"consumedServices": {
"status-bar": {
"versions": {
"^1.0.0": "consumeStatusBar"
}
}
"etch": "^0.14.1"
},
"deserializers": {
"AboutView": "deserializeAboutView"

View File

@ -2,15 +2,6 @@ describe('About', () => {
let workspaceElement;
beforeEach(async () => {
let storage = {};
spyOn(window.localStorage, 'setItem').andCallFake((key, value) => {
storage[key] = value;
});
spyOn(window.localStorage, 'getItem').andCallFake(key => {
return storage[key];
});
workspaceElement = atom.views.getView(atom.workspace);
await atom.packages.activatePackage('about');
});
@ -100,4 +91,16 @@ describe('About', () => {
expect(atom.clipboard.read()).toBe(process.version);
});
});
describe('check for update appears', () => {
it('when "pulsar-updater" is enabled', async () => {
atom.packages.activatePackage('pulsar-updater');
await atom.workspace.open('atom://about');
jasmine.attachToDOM(workspaceElement);
let aboutElement = workspaceElement.querySelector('.about');
let updateContainer = aboutElement.querySelector('.about-update-action-button');
expect(updateContainer.innerText).toBe('Check Now');
});
});
});

View File

@ -1,184 +0,0 @@
const { conditionPromise } = require('./helpers/async-spec-helpers');
const MockUpdater = require('./mocks/updater');
describe('the status bar', () => {
let atomVersion;
let workspaceElement;
beforeEach(async () => {
let storage = {};
spyOn(window.localStorage, 'setItem').andCallFake((key, value) => {
storage[key] = value;
});
spyOn(window.localStorage, 'getItem').andCallFake(key => {
return storage[key];
});
spyOn(atom, 'getVersion').andCallFake(() => {
return atomVersion;
});
workspaceElement = atom.views.getView(atom.workspace);
await atom.packages.activatePackage('status-bar');
await atom.workspace.open('sample.js');
jasmine.attachToDOM(workspaceElement);
});
afterEach(async () => {
await atom.packages.deactivatePackage('about');
await atom.packages.deactivatePackage('status-bar');
});
describe('on a stable version', function() {
beforeEach(async () => {
atomVersion = '1.2.3';
await atom.packages.activatePackage('about');
});
describe('with no update', () => {
it('does not show the view', () => {
expect(workspaceElement).not.toContain('.about-release-notes');
});
});
describe('with an update', () => {
it('shows the view when the update finishes downloading', () => {
MockUpdater.finishDownloadingUpdate('42.0.0');
expect(workspaceElement).toContain('.about-release-notes');
});
describe('clicking on the status', () => {
it('opens the about page', async () => {
MockUpdater.finishDownloadingUpdate('42.0.0');
workspaceElement.querySelector('.about-release-notes').click();
await conditionPromise(() =>
workspaceElement.querySelector('.about')
);
expect(workspaceElement.querySelector('.about')).toExist();
});
});
it('continues to show the squirrel until Pulsar is updated to the new version', async () => {
MockUpdater.finishDownloadingUpdate('42.0.0');
expect(workspaceElement).toContain('.about-release-notes');
await atom.packages.deactivatePackage('about');
expect(workspaceElement).not.toContain('.about-release-notes');
await atom.packages.activatePackage('about');
await Promise.resolve(); // Service consumption hooks are deferred until the next tick
expect(workspaceElement).toContain('.about-release-notes');
await atom.packages.deactivatePackage('about');
expect(workspaceElement).not.toContain('.about-release-notes');
atomVersion = '42.0.0';
await atom.packages.activatePackage('about');
await Promise.resolve(); // Service consumption hooks are deferred until the next tick
expect(workspaceElement).not.toContain('.about-release-notes');
});
it('does not show the view if Pulsar is updated to a newer version than notified', async () => {
MockUpdater.finishDownloadingUpdate('42.0.0');
await atom.packages.deactivatePackage('about');
atomVersion = '43.0.0';
await atom.packages.activatePackage('about');
await Promise.resolve(); // Service consumption hooks are deferred until the next tick
expect(workspaceElement).not.toContain('.about-release-notes');
});
});
});
describe('on a beta version', function() {
beforeEach(async () => {
atomVersion = '1.2.3-beta4';
await atom.packages.activatePackage('about');
});
describe('with no update', () => {
it('does not show the view', () => {
expect(workspaceElement).not.toContain('.about-release-notes');
});
});
describe('with an update', () => {
it('shows the view when the update finishes downloading', () => {
MockUpdater.finishDownloadingUpdate('42.0.0');
expect(workspaceElement).toContain('.about-release-notes');
});
describe('clicking on the status', () => {
it('opens the about page', async () => {
MockUpdater.finishDownloadingUpdate('42.0.0');
workspaceElement.querySelector('.about-release-notes').click();
await conditionPromise(() =>
workspaceElement.querySelector('.about')
);
expect(workspaceElement.querySelector('.about')).toExist();
});
});
it('continues to show the squirrel until Pulsar is updated to the new version', async () => {
MockUpdater.finishDownloadingUpdate('42.0.0');
expect(workspaceElement).toContain('.about-release-notes');
await atom.packages.deactivatePackage('about');
expect(workspaceElement).not.toContain('.about-release-notes');
await atom.packages.activatePackage('about');
await Promise.resolve(); // Service consumption hooks are deferred until the next tick
expect(workspaceElement).toContain('.about-release-notes');
await atom.packages.deactivatePackage('about');
expect(workspaceElement).not.toContain('.about-release-notes');
atomVersion = '42.0.0';
await atom.packages.activatePackage('about');
await Promise.resolve(); // Service consumption hooks are deferred until the next tick
expect(workspaceElement).not.toContain('.about-release-notes');
});
it('does not show the view if Pulsar is updated to a newer version than notified', async () => {
MockUpdater.finishDownloadingUpdate('42.0.0');
await atom.packages.deactivatePackage('about');
atomVersion = '43.0.0';
await atom.packages.activatePackage('about');
await Promise.resolve(); // Service consumption hooks are deferred until the next tick
expect(workspaceElement).not.toContain('.about-release-notes');
});
});
});
describe('on a development version', function() {
beforeEach(async () => {
atomVersion = '1.2.3-dev-0123abcd';
await atom.packages.activatePackage('about');
});
describe('with no update', () => {
it('does not show the view', () => {
expect(workspaceElement).not.toContain('.about-release-notes');
});
});
describe('with a previously downloaded update', () => {
it('does not show the view', () => {
window.localStorage.setItem('about:version-available', '42.0.0');
expect(workspaceElement).not.toContain('.about-release-notes');
});
});
});
});

View File

@ -1,23 +0,0 @@
module.exports = {
updateError() {
atom.autoUpdater.emitter.emit('update-error');
},
checkForUpdate() {
atom.autoUpdater.emitter.emit('did-begin-checking-for-update');
},
updateNotAvailable() {
atom.autoUpdater.emitter.emit('update-not-available');
},
downloadUpdate() {
atom.autoUpdater.emitter.emit('did-begin-downloading-update');
},
finishDownloadingUpdate(releaseVersion) {
atom.autoUpdater.emitter.emit('did-complete-downloading-update', {
releaseVersion
});
}
};

View File

@ -1,387 +0,0 @@
const { shell } = require('electron');
const main = require('../lib/main');
const AboutView = require('../lib/components/about-view');
const UpdateView = require('../lib/components/update-view');
const MockUpdater = require('./mocks/updater');

// Specs for the About page's update UI: which status panels are visible and
// how the action button behaves across the idle -> checking -> downloading ->
// downloaded life cycle, driven by MockUpdater emitting auto-updater events.
describe('UpdateView', () => {
  let aboutElement;
  let updateManager;
  let workspaceElement;
  let scheduler;

  beforeEach(async () => {
    // Back localStorage with an in-memory object so specs don't leak state
    // between runs.
    let storage = {};
    spyOn(window.localStorage, 'setItem').andCallFake((key, value) => {
      storage[key] = value;
    });
    spyOn(window.localStorage, 'getItem').andCallFake(key => {
      return storage[key];
    });

    workspaceElement = atom.views.getView(atom.workspace);
    await atom.packages.activatePackage('about');
    // Stub the real auto-updater: start idle, never actually check for
    // updates, and report platform support unless a spec overrides it.
    spyOn(atom.autoUpdater, 'getState').andReturn('idle');
    spyOn(atom.autoUpdater, 'checkForUpdate');
    spyOn(atom.autoUpdater, 'platformSupportsUpdates').andReturn(true);
  });

  describe('when the About page is open', () => {
    beforeEach(async () => {
      jasmine.attachToDOM(workspaceElement);
      await atom.workspace.open('atom://about');
      aboutElement = workspaceElement.querySelector('.about');
      updateManager = main.model.state.updateManager;
      // The scheduler batches view updates; specs await its next flush
      // before asserting on the DOM.
      scheduler = AboutView.getScheduler();
    });

    describe('when the updates are not supported by the platform', () => {
      beforeEach(async () => {
        atom.autoUpdater.platformSupportsUpdates.andReturn(false);
        // resetState re-reads the stubbed platform support.
        updateManager.resetState();
        await scheduler.getNextUpdatePromise();
      });

      it('hides the auto update UI and shows the update instructions link', async () => {
        expect(
          aboutElement.querySelector('.about-update-action-button')
        ).not.toBeVisible();
        expect(
          aboutElement.querySelector('.about-auto-updates')
        ).not.toBeVisible();
      });

      it('opens the update instructions page when the instructions link is clicked', async () => {
        spyOn(shell, 'openExternal');
        let link = aboutElement.querySelector(
          '.app-unsupported .about-updates-instructions'
        );
        link.click();

        let args = shell.openExternal.mostRecentCall.args;
        expect(shell.openExternal).toHaveBeenCalled();
        expect(args[0]).toContain('installing-pulsar');
      });
    });

    describe('when updates are supported by the platform', () => {
      beforeEach(async () => {
        atom.autoUpdater.platformSupportsUpdates.andReturn(true);
        updateManager.resetState();
        await scheduler.getNextUpdatePromise();
      });

      it('shows the auto update UI', () => {
        expect(aboutElement.querySelector('.about-updates')).toBeVisible();
      });

      it('shows the correct panels when the app checks for updates and there is no update available', async () => {
        expect(
          aboutElement.querySelector('.about-default-update-message')
        ).toBeVisible();

        // idle -> checking
        MockUpdater.checkForUpdate();
        await scheduler.getNextUpdatePromise();
        expect(aboutElement.querySelector('.app-up-to-date')).not.toBeVisible();
        expect(
          aboutElement.querySelector('.app-checking-for-updates')
        ).toBeVisible();

        // checking -> up to date
        MockUpdater.updateNotAvailable();
        await scheduler.getNextUpdatePromise();
        expect(aboutElement.querySelector('.app-up-to-date')).toBeVisible();
        expect(
          aboutElement.querySelector('.app-checking-for-updates')
        ).not.toBeVisible();
      });

      it('shows the correct panels when the app checks for updates and encounters an error', async () => {
        expect(
          aboutElement.querySelector('.about-default-update-message')
        ).toBeVisible();

        MockUpdater.checkForUpdate();
        await scheduler.getNextUpdatePromise();
        expect(aboutElement.querySelector('.app-up-to-date')).not.toBeVisible();
        expect(
          aboutElement.querySelector('.app-checking-for-updates')
        ).toBeVisible();

        // The view reads the error text from the auto-updater, so stub it
        // before emitting the error event.
        spyOn(atom.autoUpdater, 'getErrorMessage').andReturn(
          'an error message'
        );
        MockUpdater.updateError();
        await scheduler.getNextUpdatePromise();
        expect(aboutElement.querySelector('.app-update-error')).toBeVisible();
        expect(
          aboutElement.querySelector('.app-error-message').textContent
        ).toBe('an error message');
        expect(
          aboutElement.querySelector('.app-checking-for-updates')
        ).not.toBeVisible();
        // After an error the user may retry, so the button re-enables.
        expect(
          aboutElement.querySelector('.about-update-action-button').disabled
        ).toBe(false);
        expect(
          aboutElement.querySelector('.about-update-action-button').textContent
        ).toBe('Check now');
      });

      it('shows the correct panels and button states when the app checks for updates and an update is downloaded', async () => {
        // Idle: button enabled, default message shown.
        expect(
          aboutElement.querySelector('.about-default-update-message')
        ).toBeVisible();
        expect(
          aboutElement.querySelector('.about-update-action-button').disabled
        ).toBe(false);
        expect(
          aboutElement.querySelector('.about-update-action-button').textContent
        ).toBe('Check now');

        // Checking: button disabled while the check runs.
        MockUpdater.checkForUpdate();
        await scheduler.getNextUpdatePromise();
        expect(aboutElement.querySelector('.app-up-to-date')).not.toBeVisible();
        expect(
          aboutElement.querySelector('.app-checking-for-updates')
        ).toBeVisible();
        expect(
          aboutElement.querySelector('.about-update-action-button').disabled
        ).toBe(true);
        expect(
          aboutElement.querySelector('.about-update-action-button').textContent
        ).toBe('Check now');

        // Downloading: still disabled.
        MockUpdater.downloadUpdate();
        await scheduler.getNextUpdatePromise();
        expect(
          aboutElement.querySelector('.app-checking-for-updates')
        ).not.toBeVisible();
        expect(
          aboutElement.querySelector('.app-downloading-update')
        ).toBeVisible();
        // TODO: at some point it would be nice to be able to cancel an update download, and then this would be a cancel button
        expect(
          aboutElement.querySelector('.about-update-action-button').disabled
        ).toBe(true);
        expect(
          aboutElement.querySelector('.about-update-action-button').textContent
        ).toBe('Check now');

        // Downloaded: button re-enables and becomes the install action.
        MockUpdater.finishDownloadingUpdate('42.0.0');
        await scheduler.getNextUpdatePromise();
        expect(
          aboutElement.querySelector('.app-downloading-update')
        ).not.toBeVisible();
        expect(
          aboutElement.querySelector('.app-update-available-to-install')
        ).toBeVisible();
        expect(
          aboutElement.querySelector(
            '.app-update-available-to-install .about-updates-version'
          ).textContent
        ).toBe('42.0.0');
        expect(
          aboutElement.querySelector('.about-update-action-button').disabled
        ).toBe(false);
        expect(
          aboutElement.querySelector('.about-update-action-button').textContent
        ).toBe('Restart and install');
      });

      it('opens the release notes for the downloaded release when the release notes link are clicked', async () => {
        MockUpdater.finishDownloadingUpdate('1.2.3');
        await scheduler.getNextUpdatePromise();

        spyOn(shell, 'openExternal');
        let link = aboutElement.querySelector(
          '.app-update-available-to-install .about-updates-release-notes'
        );
        link.click();

        // The release-notes URL must point at the downloaded version's tag.
        let args = shell.openExternal.mostRecentCall.args;
        expect(shell.openExternal).toHaveBeenCalled();
        expect(args[0]).toContain('/v1.2.3');
      });

      it('executes checkForUpdate() when the check for update button is clicked', () => {
        let button = aboutElement.querySelector('.about-update-action-button');
        button.click();
        expect(atom.autoUpdater.checkForUpdate).toHaveBeenCalled();
      });

      it('executes restartAndInstallUpdate() when the restart and install button is clicked', async () => {
        spyOn(atom.autoUpdater, 'restartAndInstallUpdate');

        MockUpdater.finishDownloadingUpdate('42.0.0');
        await scheduler.getNextUpdatePromise();

        let button = aboutElement.querySelector('.about-update-action-button');
        button.click();
        expect(atom.autoUpdater.restartAndInstallUpdate).toHaveBeenCalled();
      });

      it("starts in the same state as atom's AutoUpdateManager", async () => {
        // If the app was already downloading when the view opened, the view
        // must pick up that state rather than starting from idle.
        atom.autoUpdater.getState.andReturn('downloading');
        updateManager.resetState();

        await scheduler.getNextUpdatePromise();
        expect(
          aboutElement.querySelector('.app-checking-for-updates')
        ).not.toBeVisible();
        expect(
          aboutElement.querySelector('.app-downloading-update')
        ).toBeVisible();
        expect(
          aboutElement.querySelector('.about-update-action-button').disabled
        ).toBe(true);
        expect(
          aboutElement.querySelector('.about-update-action-button').textContent
        ).toBe('Check now');
      });

      // describe('when core.automaticallyUpdate is toggled', () => {
      //   beforeEach(async () => {
      //     expect(atom.config.get('core.automaticallyUpdate')).toBe(true);
      //     atom.autoUpdater.checkForUpdate.reset();
      //   });
      //
      //   it('shows the auto update UI', async () => {
      //     expect(
      //       aboutElement.querySelector('.about-auto-updates input').checked
      //     ).toBe(true);
      //     expect(
      //       aboutElement.querySelector('.about-default-update-message')
      //     ).toBeVisible();
      //     expect(
      //       aboutElement.querySelector('.about-default-update-message')
      //         .textContent
      //     ).toBe('Pulsar will check for updates automatically');
      //
      //     atom.config.set('core.automaticallyUpdate', false);
      //     await scheduler.getNextUpdatePromise();
      //
      //     expect(
      //       aboutElement.querySelector('.about-auto-updates input').checked
      //     ).toBe(false);
      //     expect(
      //       aboutElement.querySelector('.about-default-update-message')
      //     ).toBeVisible();
      //     expect(
      //       aboutElement.querySelector('.about-default-update-message')
      //         .textContent
      //     ).toBe('Automatic updates are disabled please check manually');
      //   });
      //
      //   it('updates config and the UI when the checkbox is used to toggle', async () => {
      //     expect(
      //       aboutElement.querySelector('.about-auto-updates input').checked
      //     ).toBe(true);
      //
      //     aboutElement.querySelector('.about-auto-updates input').click();
      //     await scheduler.getNextUpdatePromise();
      //
      //     expect(atom.config.get('core.automaticallyUpdate')).toBe(false);
      //     expect(
      //       aboutElement.querySelector('.about-auto-updates input').checked
      //     ).toBe(false);
      //     expect(
      //       aboutElement.querySelector('.about-default-update-message')
      //     ).toBeVisible();
      //     expect(
      //       aboutElement.querySelector('.about-default-update-message')
      //         .textContent
      //     ).toBe('Automatic updates are disabled please check manually');
      //
      //     aboutElement.querySelector('.about-auto-updates input').click();
      //     await scheduler.getNextUpdatePromise();
      //
      //     expect(atom.config.get('core.automaticallyUpdate')).toBe(true);
      //     expect(
      //       aboutElement.querySelector('.about-auto-updates input').checked
      //     ).toBe(true);
      //     expect(
      //       aboutElement.querySelector('.about-default-update-message')
      //     ).toBeVisible();
      //     expect(
      //       aboutElement.querySelector('.about-default-update-message')
      //         .textContent
      //     ).toBe('Pulsar will check for updates automatically');
      //   });
      //
      //   describe('checking for updates', function() {
      //     afterEach(() => {
      //       this.updateView = null;
      //     });
      //
      //     it('checks for update when the about page is shown', () => {
      //       expect(atom.autoUpdater.checkForUpdate).not.toHaveBeenCalled();
      //
      //       this.updateView = new UpdateView({
      //         updateManager: updateManager,
      //         availableVersion: '9999.0.0',
      //         viewUpdateReleaseNotes: () => {}
      //       });
      //
      //       expect(atom.autoUpdater.checkForUpdate).toHaveBeenCalled();
      //     });
      //
      //     it('does not check for update when the about page is shown and the update manager is not in the idle state', () => {
      //       atom.autoUpdater.getState.andReturn('downloading');
      //       updateManager.resetState();
      //       expect(atom.autoUpdater.checkForUpdate).not.toHaveBeenCalled();
      //
      //       this.updateView = new UpdateView({
      //         updateManager: updateManager,
      //         availableVersion: '9999.0.0',
      //         viewUpdateReleaseNotes: () => {}
      //       });
      //
      //       expect(atom.autoUpdater.checkForUpdate).not.toHaveBeenCalled();
      //     });
      //
      //     it('does not check for update when the about page is shown and auto updates are turned off', () => {
      //       atom.config.set('core.automaticallyUpdate', false);
      //       expect(atom.autoUpdater.checkForUpdate).not.toHaveBeenCalled();
      //
      //       this.updateView = new UpdateView({
      //         updateManager: updateManager,
      //         availableVersion: '9999.0.0',
      //         viewUpdateReleaseNotes: () => {}
      //       });
      //
      //       expect(atom.autoUpdater.checkForUpdate).not.toHaveBeenCalled();
      //     });
      //   });
      // });
    });
  });

  describe('when the About page is not open and an update is downloaded', () => {
    it('should display the new version when it is opened', async () => {
      // Download completes before the page exists; opening it later must
      // still show the downloaded version.
      MockUpdater.finishDownloadingUpdate('42.0.0');

      jasmine.attachToDOM(workspaceElement);
      await atom.workspace.open('atom://about');
      aboutElement = workspaceElement.querySelector('.about');
      updateManager = main.model.state.updateManager;
      scheduler = AboutView.getScheduler();

      expect(
        aboutElement.querySelector('.app-update-available-to-install')
      ).toBeVisible();
      expect(
        aboutElement.querySelector(
          '.app-update-available-to-install .about-updates-version'
        ).textContent
      ).toBe('42.0.0');
      expect(
        aboutElement.querySelector('.about-update-action-button').disabled
      ).toBe(false);
      expect(
        aboutElement.querySelector('.about-update-action-button').textContent
      ).toBe('Restart and install');
    });
  });
});

View File

@ -94,7 +94,7 @@
.about-updates {
width: 100%;
max-width: 500px;
//max-width: 510px;
}
.about-updates-box {
@ -109,7 +109,7 @@
.about-updates-status {
flex: 1;
margin-left: .5em;
text-align: left;
text-align: center;
}
.about-updates-item,

View File

@ -1,108 +0,0 @@
temp = require 'temp'

# Specs for the autocomplete-atom-api provider: completions for the `atom`
# global are only offered inside Atom packages, Atom core, or .atom/init.
describe "Atom API autocompletions", ->
  [editor, provider] = []

  # Build a suggestion request from the current cursor position and ask the
  # provider for completions.
  getCompletions = ->
    cursor = editor.getLastCursor()
    start = cursor.getBeginningOfCurrentWordBufferPosition()
    end = cursor.getBufferPosition()
    prefix = editor.getTextInRange([start, end])
    request =
      editor: editor
      bufferPosition: end
      scopeDescriptor: cursor.getScopeDescriptor()
      prefix: prefix
    provider.getSuggestions(request)

  beforeEach ->
    waitsForPromise -> atom.packages.activatePackage('autocomplete-atom-api')

    runs ->
      provider = atom.packages.getActivePackage('autocomplete-atom-api').mainModule.getProvider()

    # Wait for the provider to load its completion data and scan for
    # package directories before any spec runs.
    waitsFor -> Object.keys(provider.completions).length > 0
    waitsFor -> provider.packageDirectories?.length > 0

    waitsForPromise -> atom.workspace.open('test.js')

    runs -> editor = atom.workspace.getActiveTextEditor()

  it "only includes completions in files that are in an Atom package or Atom core", ->
    emptyProjectPath = temp.mkdirSync('atom-project-')
    atom.project.setPaths([emptyProjectPath])

    waitsForPromise -> atom.workspace.open('empty.js')

    runs ->
      expect(provider.packageDirectories.length).toBe 0
      editor = atom.workspace.getActiveTextEditor()
      editor.setText('atom.')
      editor.setCursorBufferPosition([0, Infinity])
      # Outside a package: the provider opts out entirely.
      expect(getCompletions()).toBeUndefined()

  it "only includes completions in .atom/init", ->
    emptyProjectPath = temp.mkdirSync('some-guy')
    atom.project.setPaths([emptyProjectPath])

    waitsForPromise -> atom.workspace.open('.atom/init.coffee')

    runs ->
      expect(provider.packageDirectories.length).toBe 0
      editor = atom.workspace.getActiveTextEditor()
      editor.setText('atom.')
      editor.setCursorBufferPosition([0, Infinity])
      # The init file is an exception even with no package directories.
      expect(getCompletions()).not.toBeUndefined()

  it "does not fail when no editor path", ->
    emptyProjectPath = temp.mkdirSync('some-guy')
    atom.project.setPaths([emptyProjectPath])

    waitsForPromise -> atom.workspace.open()

    runs ->
      expect(provider.packageDirectories.length).toBe 0
      editor = atom.workspace.getActiveTextEditor()
      editor.setText('atom.')
      editor.setCursorBufferPosition([0, Infinity])
      expect(getCompletions()).toBeUndefined()

  it "includes properties and functions on the atom global", ->
    editor.setText('atom.')
    editor.setCursorBufferPosition([0, Infinity])
    expect(getCompletions().length).toBe 53
    expect(getCompletions()[0].text).toBe 'clipboard'

    editor.setText('var c = atom.')
    editor.setCursorBufferPosition([0, Infinity])
    expect(getCompletions().length).toBe 53
    expect(getCompletions()[0].text).toBe 'clipboard'

    # Prefix "c" narrows the list and carries type/label metadata.
    editor.setText('atom.c')
    editor.setCursorBufferPosition([0, Infinity])
    expect(getCompletions().length).toBe 7
    expect(getCompletions()[0].text).toBe 'clipboard'
    expect(getCompletions()[0].type).toBe 'property'
    expect(getCompletions()[0].leftLabel).toBe 'Clipboard'
    expect(getCompletions()[1].text).toBe 'commands'
    expect(getCompletions()[2].text).toBe 'config'
    expect(getCompletions()[6].snippet).toBe 'confirm(${1:options})'
    expect(getCompletions()[6].type).toBe 'method'
    expect(getCompletions()[6].leftLabel).toBe 'Number'
    expect(getCompletions()[6].descriptionMoreURL).toBe 'https://atom.io/docs/api/latest/AtomEnvironment#instance-confirm'

  it "includes methods on atom global properties", ->
    editor.setText('atom.clipboard.')
    editor.setCursorBufferPosition([0, Infinity])
    expect(getCompletions().length).toBe 3
    expect(getCompletions()[0].text).toBe 'read()'
    expect(getCompletions()[1].text).toBe 'readWithMetadata()'
    expect(getCompletions()[2].snippet).toBe 'write(${1:text}, ${2:metadata})'

    editor.setText('atom.clipboard.rea')
    editor.setCursorBufferPosition([0, Infinity])
    expect(getCompletions().length).toBe 2
    expect(getCompletions()[0].text).toBe 'read()'
    expect(getCompletions()[1].text).toBe 'readWithMetadata()'

View File

@ -0,0 +1,119 @@
const temp = require('temp');

// Specs for the autocomplete-atom-api provider: completions for the `atom`
// global are only offered inside Atom packages, Atom core, or .atom/init.
describe("Atom API autocompletions", () => {
  let [editor, provider] = [];

  // Build a suggestion request from the current cursor position and ask the
  // provider for completions.
  const getCompletions = function() {
    const cursor = editor.getLastCursor();
    const start = cursor.getBeginningOfCurrentWordBufferPosition();
    const end = cursor.getBufferPosition();
    const prefix = editor.getTextInRange([start, end]);
    const request = {
      editor,
      bufferPosition: end,
      scopeDescriptor: cursor.getScopeDescriptor(),
      prefix
    };
    return provider.getSuggestions(request);
  };

  beforeEach(() => {
    waitsForPromise(() => atom.packages.activatePackage('autocomplete-atom-api'));

    runs(() => provider = atom.packages.getActivePackage('autocomplete-atom-api').mainModule.getProvider());

    // Wait for the provider to load its completion data and scan for
    // package directories before any spec runs.
    waitsFor(() => Object.keys(provider.completions).length > 0);
    waitsFor(() => provider.packageDirectories?.length > 0);

    waitsForPromise(() => atom.workspace.open('test.js'));

    runs(() => editor = atom.workspace.getActiveTextEditor());
  });

  it("only includes completions in files that are in an Atom package or Atom core", () => {
    const emptyProjectPath = temp.mkdirSync('atom-project-');
    atom.project.setPaths([emptyProjectPath]);

    waitsForPromise(() => atom.workspace.open('empty.js'));

    runs(() => {
      expect(provider.packageDirectories.length).toBe(0);
      editor = atom.workspace.getActiveTextEditor();
      editor.setText('atom.');
      editor.setCursorBufferPosition([0, Infinity]);
      // Outside a package: the provider opts out entirely.
      expect(getCompletions()).toBeUndefined();
    });
  });

  it("only includes completions in .atom/init", () => {
    const emptyProjectPath = temp.mkdirSync('some-guy');
    atom.project.setPaths([emptyProjectPath]);

    waitsForPromise(() => atom.workspace.open('.atom/init.coffee'));

    runs(() => {
      expect(provider.packageDirectories.length).toBe(0);
      editor = atom.workspace.getActiveTextEditor();
      editor.setText('atom.');
      editor.setCursorBufferPosition([0, Infinity]);
      // The init file is an exception even with no package directories.
      expect(getCompletions()).not.toBeUndefined();
    });
  });

  it("does not fail when no editor path", () => {
    const emptyProjectPath = temp.mkdirSync('some-guy');
    atom.project.setPaths([emptyProjectPath]);

    waitsForPromise(() => atom.workspace.open());

    runs(() => {
      expect(provider.packageDirectories.length).toBe(0);
      editor = atom.workspace.getActiveTextEditor();
      editor.setText('atom.');
      editor.setCursorBufferPosition([0, Infinity]);
      expect(getCompletions()).toBeUndefined();
    });
  });

  it("includes properties and functions on the atom global", () => {
    editor.setText('atom.');
    editor.setCursorBufferPosition([0, Infinity]);
    expect(getCompletions().length).toBe(53);
    expect(getCompletions()[0].text).toBe('clipboard');

    editor.setText('var c = atom.');
    editor.setCursorBufferPosition([0, Infinity]);
    expect(getCompletions().length).toBe(53);
    expect(getCompletions()[0].text).toBe('clipboard');

    // Prefix "c" narrows the list and carries type/label metadata.
    editor.setText('atom.c');
    editor.setCursorBufferPosition([0, Infinity]);
    expect(getCompletions().length).toBe(7);
    expect(getCompletions()[0].text).toBe('clipboard');
    expect(getCompletions()[0].type).toBe('property');
    expect(getCompletions()[0].leftLabel).toBe('Clipboard');
    expect(getCompletions()[1].text).toBe('commands');
    expect(getCompletions()[2].text).toBe('config');
    expect(getCompletions()[6].snippet).toBe('confirm(${1:options})');
    expect(getCompletions()[6].type).toBe('method');
    expect(getCompletions()[6].leftLabel).toBe('Number');
    expect(getCompletions()[6].descriptionMoreURL).toBe('https://atom.io/docs/api/latest/AtomEnvironment#instance-confirm');
  });

  it("includes methods on atom global properties", () => {
    editor.setText('atom.clipboard.');
    editor.setCursorBufferPosition([0, Infinity]);
    expect(getCompletions().length).toBe(3);
    expect(getCompletions()[0].text).toBe('read()');
    expect(getCompletions()[1].text).toBe('readWithMetadata()');
    expect(getCompletions()[2].snippet).toBe('write(${1:text}, ${2:metadata})');

    editor.setText('atom.clipboard.rea');
    editor.setCursorBufferPosition([0, Infinity]);
    expect(getCompletions().length).toBe(2);
    expect(getCompletions()[0].text).toBe('read()');
    expect(getCompletions()[1].text).toBe('readWithMetadata()');
  });
});

File diff suppressed because it is too large Load Diff

View File

@ -141,7 +141,9 @@ module.exports = class SuggestionListElement {
marked(item.descriptionMarkdown, {
gfm: true,
breaks: true,
sanitize: false
sanitize: false,
mangle: false,
headerIds: false
})
)
this.setDescriptionMoreLink(item)

View File

@ -4,7 +4,7 @@ const SuggestionListElement = require('./suggestion-list-element')
module.exports =
class SuggestionList {
constructor () {
constructor() {
this.wordPrefixRegex = null
this.cancel = this.cancel.bind(this)
this.confirm = this.confirm.bind(this)
@ -16,9 +16,10 @@ class SuggestionList {
this.hide = this.hide.bind(this)
this.destroyOverlay = this.destroyOverlay.bind(this)
this.activeEditor = null
this.lastActiveAt = 0
}
initialize () {
initialize() {
this.emitter = new Emitter()
this.subscriptions = new CompositeDisposable()
@ -37,7 +38,7 @@ class SuggestionList {
}))
}
get suggestionListElement () {
get suggestionListElement() {
if (!this._suggestionListElement) {
this._suggestionListElement = new SuggestionListElement(this)
}
@ -45,7 +46,7 @@ class SuggestionList {
return this._suggestionListElement
}
addBindings (editor) {
addBindings(editor) {
if (this.bindings && this.bindings.dispose) {
this.bindings.dispose()
}
@ -124,47 +125,47 @@ class SuggestionList {
Section: Event Triggers
*/
cancel () {
cancel() {
return this.emitter.emit('did-cancel')
}
confirm (match) {
confirm(match) {
return this.emitter.emit('did-confirm', match)
}
confirmSelection () {
confirmSelection() {
return this.emitter.emit('did-confirm-selection')
}
confirmSelectionIfNonDefault (event) {
confirmSelectionIfNonDefault(event) {
return this.emitter.emit('did-confirm-selection-if-non-default', event)
}
select (suggestion) {
select(suggestion) {
return this.emitter.emit('did-select', suggestion)
}
selectNext () {
selectNext() {
return this.emitter.emit('did-select-next')
}
selectPrevious () {
selectPrevious() {
return this.emitter.emit('did-select-previous')
}
selectPageUp () {
selectPageUp() {
return this.emitter.emit('did-select-page-up')
}
selectPageDown () {
selectPageDown() {
return this.emitter.emit('did-select-page-down')
}
selectTop () {
selectTop() {
return this.emitter.emit('did-select-top')
}
selectBottom () {
selectBottom() {
return this.emitter.emit('did-select-bottom')
}
@ -172,67 +173,67 @@ class SuggestionList {
Section: Events
*/
onDidConfirmSelection (fn) {
onDidConfirmSelection(fn) {
return this.emitter.on('did-confirm-selection', fn)
}
onDidconfirmSelectionIfNonDefault (fn) {
onDidconfirmSelectionIfNonDefault(fn) {
return this.emitter.on('did-confirm-selection-if-non-default', fn)
}
onDidConfirm (fn) {
onDidConfirm(fn) {
return this.emitter.on('did-confirm', fn)
}
onDidSelect (fn) {
onDidSelect(fn) {
return this.emitter.on('did-select', fn)
}
onDidSelectNext (fn) {
onDidSelectNext(fn) {
return this.emitter.on('did-select-next', fn)
}
onDidSelectPrevious (fn) {
onDidSelectPrevious(fn) {
return this.emitter.on('did-select-previous', fn)
}
onDidSelectPageUp (fn) {
onDidSelectPageUp(fn) {
return this.emitter.on('did-select-page-up', fn)
}
onDidSelectPageDown (fn) {
onDidSelectPageDown(fn) {
return this.emitter.on('did-select-page-down', fn)
}
onDidSelectTop (fn) {
onDidSelectTop(fn) {
return this.emitter.on('did-select-top', fn)
}
onDidSelectBottom (fn) {
onDidSelectBottom(fn) {
return this.emitter.on('did-select-bottom', fn)
}
onDidCancel (fn) {
onDidCancel(fn) {
return this.emitter.on('did-cancel', fn)
}
onDidDispose (fn) {
onDidDispose(fn) {
return this.emitter.on('did-dispose', fn)
}
onDidChangeItems (fn) {
onDidChangeItems(fn) {
return this.emitter.on('did-change-items', fn)
}
onDidChangeItem (fn) {
onDidChangeItem(fn) {
return this.emitter.on('did-change-item', fn)
}
isActive () {
isActive() {
return (this.activeEditor != null)
}
show (editor, options) {
show(editor, options) {
if (atom.config.get('autocomplete-plus.suggestionListFollows') === 'Cursor') {
return this.showAtCursorPosition(editor, options)
} else {
@ -250,7 +251,7 @@ class SuggestionList {
}
}
showAtBeginningOfPrefix (editor, prefix, followRawPrefix = false) {
showAtBeginningOfPrefix(editor, prefix, followRawPrefix = false) {
let bufferPosition
if (editor) {
bufferPosition = editor.getCursorBufferPosition()
@ -275,6 +276,7 @@ class SuggestionList {
this.overlayDecoration = editor.decorateMarker(marker, {type: 'overlay', item: this.suggestionListElement, position: 'tail', class: 'autocomplete-plus'})
const editorElement = atom.views.getView(this.activeEditor)
if (editorElement && editorElement.classList) {
this.lastActiveAt = performance.now()
editorElement.classList.add('autocomplete-active')
}
@ -283,7 +285,7 @@ class SuggestionList {
}
}
showAtCursorPosition (editor) {
showAtCursorPosition(editor) {
if (this.activeEditor === editor || (editor == null)) { return }
this.destroyOverlay()
@ -295,6 +297,7 @@ class SuggestionList {
this.activeEditor = editor
const editorElement = atom.views.getView(this.activeEditor)
if (editorElement && editorElement.classList) {
this.lastActiveAt = performance.now()
editorElement.classList.add('autocomplete-active')
}
@ -303,7 +306,7 @@ class SuggestionList {
}
}
hide () {
hide() {
this.destroyOverlay()
if (this.activeEditor === null) {
return
@ -317,7 +320,7 @@ class SuggestionList {
return this.activeEditor
}
destroyOverlay () {
destroyOverlay() {
if (this.suggestionMarker && this.suggestionMarker.destroy) {
this.suggestionMarker.destroy()
} else if (this.overlayDecoration && this.overlayDecoration.destroy) {
@ -325,7 +328,11 @@ class SuggestionList {
}
const editorElement = atom.views.getView(this.activeEditor)
if (editorElement && editorElement.classList) {
let timestamp = this.lastActiveAt
atom.views.updateDocument(() => {
// A newer timestamp here means that the menu is open again and we
// shouldn't remove this class name anymore.
if (this.lastActiveAt > timestamp) return
editorElement.classList.remove('autocomplete-active')
})
}
@ -334,12 +341,12 @@ class SuggestionList {
return this.overlayDecoration
}
changeItems (items) {
changeItems(items) {
this.items = items
return this.emitter.emit('did-change-items', this.items)
}
replaceItem (oldSuggestion, newSuggestion) {
replaceItem(oldSuggestion, newSuggestion) {
if (newSuggestion == null) {
return
}
@ -368,7 +375,7 @@ class SuggestionList {
}
// Public: Clean up, stop listening to events
dispose () {
dispose() {
if (this.subscriptions) {
this.subscriptions.dispose()
}

View File

@ -1,639 +0,0 @@
describe "Autoflow package", ->
[autoflow, editor, editorElement] = []
tabLength = 4
describe "autoflow:reflow-selection", ->
beforeEach ->
activationPromise = null
waitsForPromise ->
atom.workspace.open()
runs ->
editor = atom.workspace.getActiveTextEditor()
editorElement = atom.views.getView(editor)
atom.config.set('editor.preferredLineLength', 30)
atom.config.set('editor.tabLength', tabLength)
activationPromise = atom.packages.activatePackage('autoflow')
atom.commands.dispatch editorElement, 'autoflow:reflow-selection'
waitsForPromise ->
activationPromise
it "uses the preferred line length based on the editor's scope", ->
atom.config.set('editor.preferredLineLength', 4, scopeSelector: '.text.plain.null-grammar')
editor.setText("foo bar")
editor.selectAll()
atom.commands.dispatch editorElement, 'autoflow:reflow-selection'
expect(editor.getText()).toBe """
foo
bar
"""
it "rearranges line breaks in the current selection to ensure lines are shorter than config.editor.preferredLineLength honoring tabLength", ->
editor.setText "\t\tThis is the first paragraph and it is longer than the preferred line length so it should be reflowed.\n\n\t\tThis is a short paragraph.\n\n\t\tAnother long paragraph, it should also be reflowed with the use of this single command."
editor.selectAll()
atom.commands.dispatch editorElement, 'autoflow:reflow-selection'
exedOut = editor.getText().replace(/\t/g, Array(tabLength+1).join 'X')
expect(exedOut).toBe "XXXXXXXXThis is the first\nXXXXXXXXparagraph and it is\nXXXXXXXXlonger than the\nXXXXXXXXpreferred line length\nXXXXXXXXso it should be\nXXXXXXXXreflowed.\n\nXXXXXXXXThis is a short\nXXXXXXXXparagraph.\n\nXXXXXXXXAnother long\nXXXXXXXXparagraph, it should\nXXXXXXXXalso be reflowed with\nXXXXXXXXthe use of this single\nXXXXXXXXcommand."
it "rearranges line breaks in the current selection to ensure lines are shorter than config.editor.preferredLineLength", ->
editor.setText """
This is the first paragraph and it is longer than the preferred line length so it should be reflowed.
This is a short paragraph.
Another long paragraph, it should also be reflowed with the use of this single command.
"""
editor.selectAll()
atom.commands.dispatch editorElement, 'autoflow:reflow-selection'
expect(editor.getText()).toBe """
This is the first paragraph
and it is longer than the
preferred line length so it
should be reflowed.
This is a short paragraph.
Another long paragraph, it
should also be reflowed with
the use of this single
command.
"""
it "is not confused when the selection boundary is between paragraphs", ->
editor.setText """
v--- SELECTION STARTS AT THE BEGINNING OF THE NEXT LINE (pos 1,0)
The preceding newline should not be considered part of this paragraph.
The newline at the end of this paragraph should be preserved and not
converted into a space.
^--- SELECTION ENDS AT THE BEGINNING OF THE PREVIOUS LINE (pos 6,0)
"""
editor.setCursorBufferPosition([1, 0])
editor.selectToBufferPosition([6, 0])
atom.commands.dispatch editorElement, 'autoflow:reflow-selection'
expect(editor.getText()).toBe """
v--- SELECTION STARTS AT THE BEGINNING OF THE NEXT LINE (pos 1,0)
The preceding newline should
not be considered part of this
paragraph.
The newline at the end of this
paragraph should be preserved
and not converted into a
space.
^--- SELECTION ENDS AT THE BEGINNING OF THE PREVIOUS LINE (pos 6,0)
"""
it "reflows the current paragraph if nothing is selected", ->
editor.setText """
This is a preceding paragraph, which shouldn't be modified by a reflow of the following paragraph.
The quick brown fox jumps over the lazy
dog. The preceding sentence contains every letter
in the entire English alphabet, which has absolutely no relevance
to this test.
This is a following paragraph, which shouldn't be modified by a reflow of the preciding paragraph.
"""
editor.setCursorBufferPosition([3, 5])
atom.commands.dispatch editorElement, 'autoflow:reflow-selection'
expect(editor.getText()).toBe """
This is a preceding paragraph, which shouldn't be modified by a reflow of the following paragraph.
The quick brown fox jumps over
the lazy dog. The preceding
sentence contains every letter
in the entire English
alphabet, which has absolutely
no relevance to this test.
This is a following paragraph, which shouldn't be modified by a reflow of the preciding paragraph.
"""
it "allows for single words that exceed the preferred wrap column length", ->
editor.setText("this-is-a-super-long-word-that-shouldn't-break-autoflow and these are some smaller words")
editor.selectAll()
atom.commands.dispatch editorElement, 'autoflow:reflow-selection'
expect(editor.getText()).toBe """
this-is-a-super-long-word-that-shouldn't-break-autoflow
and these are some smaller
words
"""
describe "reflowing text", ->
beforeEach ->
autoflow = require("../lib/autoflow")
it 'respects current paragraphs', ->
text = '''
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida nibh id magna ullamcorper sagittis. Maecenas
et enim eu orci tincidunt adipiscing
aliquam ligula.
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
Phasellus gravida
nibh id magna ullamcorper
tincidunt adipiscing lacinia a dui. Etiam quis erat dolor.
rutrum nisl fermentum rhoncus. Duis blandit ligula facilisis fermentum.
'''
res = '''
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida nibh
id magna ullamcorper sagittis. Maecenas et enim eu orci tincidunt adipiscing
aliquam ligula.
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida nibh
id magna ullamcorper tincidunt adipiscing lacinia a dui. Etiam quis erat dolor.
rutrum nisl fermentum rhoncus. Duis blandit ligula facilisis fermentum.
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
# Continuation lines that are more deeply indented set the hanging indent of the
# reflowed paragraph (second paragraph keeps its deeper indent in the output).
it 'respects indentation', ->
text = '''
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida nibh id magna ullamcorper sagittis. Maecenas
et enim eu orci tincidunt adipiscing
aliquam ligula.
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
Phasellus gravida
nibh id magna ullamcorper
tincidunt adipiscing lacinia a dui. Etiam quis erat dolor.
rutrum nisl fermentum rhoncus. Duis blandit ligula facilisis fermentum
'''
res = '''
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida nibh
id magna ullamcorper sagittis. Maecenas et enim eu orci tincidunt adipiscing
aliquam ligula.
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida
nibh id magna ullamcorper tincidunt adipiscing lacinia a dui. Etiam quis
erat dolor. rutrum nisl fermentum rhoncus. Duis blandit ligula facilisis
fermentum
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res

# A '#' prefix on the second paragraph is carried onto every wrapped line of
# that paragraph; the unprefixed first paragraph is reflowed without it.
it 'respects prefixed text (comments!)', ->
text = '''
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida nibh id magna ullamcorper sagittis. Maecenas
et enim eu orci tincidunt adipiscing
aliquam ligula.
# Lorem ipsum dolor sit amet, consectetur adipiscing elit.
# Phasellus gravida
# nibh id magna ullamcorper
# tincidunt adipiscing lacinia a dui. Etiam quis erat dolor.
# rutrum nisl fermentum rhoncus. Duis blandit ligula facilisis fermentum
'''
res = '''
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida nibh
id magna ullamcorper sagittis. Maecenas et enim eu orci tincidunt adipiscing
aliquam ligula.
# Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida
# nibh id magna ullamcorper tincidunt adipiscing lacinia a dui. Etiam quis
# erat dolor. rutrum nisl fermentum rhoncus. Duis blandit ligula facilisis
# fermentum
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res

# A multi-character '//' prefix is repeated on each wrapped line.
it 'respects multiple prefixes (js/c comments)', ->
text = '''
// Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida
et enim eu orci tincidunt adipiscing
aliquam ligula.
'''
res = '''
// Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida et
// enim eu orci tincidunt adipiscing aliquam ligula.
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res

# '*' (doc-comment body) is treated as a line prefix, and a later '*'-prefixed
# line starts its own paragraph rather than being merged in.
it 'properly handles * prefix', ->
text = '''
* Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida
et enim eu orci tincidunt adipiscing
aliquam ligula.
* soidjfiojsoidj foi
'''
res = '''
* Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida et
* enim eu orci tincidunt adipiscing aliquam ligula.
* soidjfiojsoidj foi
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res

# Regression test: a '***' prefix must not be interpreted as an invalid regular
# expression when building the prefix pattern; input is returned unchanged.
it "does not throw invalid regular expression errors (regression)", ->
text = '''
*** Lorem ipsum dolor sit amet
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual text
# Lines of the same paragraph arriving with differing leading indents are still
# merged and rewrapped as one paragraph.
it 'handles different initial indentation', ->
text = '''
Magna ea magna fugiat nisi minim in id duis. Culpa sit sint consequat quis elit magna pariatur incididunt
proident laborum deserunt est aliqua reprehenderit. Occaecat et ex non do Lorem irure adipisicing mollit excepteur
eu ullamco consectetur. Ex ex Lorem duis labore quis ad exercitation elit dolor non adipisicing. Pariatur commodo ullamco
culpa dolor sunt enim. Ullamco dolore do ea nulla ut commodo minim consequat cillum ad velit quis.
'''
res = '''
Magna ea magna fugiat nisi minim in id duis. Culpa sit sint consequat quis elit
magna pariatur incididunt proident laborum deserunt est aliqua reprehenderit.
Occaecat et ex non do Lorem irure adipisicing mollit excepteur eu ullamco
consectetur. Ex ex Lorem duis labore quis ad exercitation elit dolor non
adipisicing. Pariatur commodo ullamco culpa dolor sunt enim. Ullamco dolore do
ea nulla ut commodo minim consequat cillum ad velit quis.
'''
# Mixed \r\n and bare \r line endings are all treated as line breaks and the
# result is rejoined with plain \n.
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
it 'properly handles CRLF', ->
text = "This is the first line and it is longer than the preferred line length so it should be reflowed.\r\nThis is a short line which should\r\nbe reflowed with the following line.\rAnother long line, it should also be reflowed with everything above it when it is all reflowed."
res =
'''
This is the first line and it is longer than the preferred line length so it
should be reflowed. This is a short line which should be reflowed with the
following line. Another long line, it should also be reflowed with everything
above it when it is all reflowed.
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
# Non-ASCII (Cyrillic) words must be measured and wrapped like ASCII words.
it 'handles cyrillic text', ->
text = '''
В начале июля, в чрезвычайно жаркое время, под вечер, один молодой человек вышел из своей каморки, которую нанимал от жильцов в С-м переулке, на улицу и медленно, как бы в нерешимости, отправился к К-ну мосту.
'''
res = '''
В начале июля, в чрезвычайно жаркое время, под вечер, один молодой человек вышел
из своей каморки, которую нанимал от жильцов в С-м переулке, на улицу и
медленно, как бы в нерешимости, отправился к К-ну мосту.
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
it 'handles `yo` character properly', ->
# Because there are known problems with this character in major regex engines
# (Ё falls outside \w in engines without full Unicode support), wrap at a
# column of 2 so each single-character word lands on its own line.
text = 'Ё Ё Ё'
res = '''
Ё
Ё
Ё
'''
expect(autoflow.reflow(text, wrapColumn: 2)).toEqual res
# A single long '//' comment line is wrapped at 80 with '// ' repeated on every
# output line.
it 'properly reflows // comments ', ->
text =
'''
// Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.
'''
res =
'''
// Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard
// sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical
// fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest
// quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro
// actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia
// sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher
// direct trade, tacos pickled fanny pack literally meh pinterest slow-carb.
// Meditation microdosing distillery 8-bit humblebrag migas.
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
# A '/* ... */' block comment is wrapped WITHOUT repeating the opener: only the
# first line keeps '/*'; continuation lines get a bare two-space indent.
it 'properly reflows /* comments ', ->
text =
'''
/* Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas. */
'''
res =
'''
/* Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard
sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical
fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest
quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro
actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia
sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher
direct trade, tacos pickled fanny pack literally meh pinterest slow-carb.
Meditation microdosing distillery 8-bit humblebrag migas. */
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
# '# ' is repeated on every wrapped line.
it 'properly reflows pound comments ', ->
text =
'''
# Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.
'''
res =
'''
# Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha
# banh mi, cold-pressed retro whatever ethical man braid asymmetrical
# fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa
# leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually
# aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial
# letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade,
# tacos pickled fanny pack literally meh pinterest slow-carb. Meditation
# microdosing distillery 8-bit humblebrag migas.
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
# A '- ' Markdown list bullet appears only on the first line; wrapped
# continuation lines get a two-space hanging indent instead.
it 'properly reflows - list items ', ->
text =
'''
- Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.
'''
res =
'''
- Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha
banh mi, cold-pressed retro whatever ethical man braid asymmetrical
fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa
leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually
aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial
letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade,
tacos pickled fanny pack literally meh pinterest slow-carb. Meditation
microdosing distillery 8-bit humblebrag migas.
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
# '%' (TeX-style comment) is repeated on every wrapped line.
it 'properly reflows % comments ', ->
text =
'''
% Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.
'''
res =
'''
% Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha
% banh mi, cold-pressed retro whatever ethical man braid asymmetrical
% fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa
% leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually
% aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial
% letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade,
% tacos pickled fanny pack literally meh pinterest slow-carb. Meditation
% microdosing distillery 8-bit humblebrag migas.
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
# R roxygen2 "#'" prefix is recognized as a unit and repeated per line.
it "properly reflows roxygen comments ", ->
text =
'''
#' Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.
'''
res =
'''
#' Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard
#' sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical
#' fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest
#' quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro
#' actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia
#' sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher
#' direct trade, tacos pickled fanny pack literally meh pinterest slow-carb.
#' Meditation microdosing distillery 8-bit humblebrag migas.
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
# SQL/Lua/Haskell-style '--' prefix is repeated per line.
it "properly reflows -- comments ", ->
text =
'''
-- Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.
'''
res =
'''
-- Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard
-- sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical
-- fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest
-- quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro
-- actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia
-- sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher
-- direct trade, tacos pickled fanny pack literally meh pinterest slow-carb.
-- Meditation microdosing distillery 8-bit humblebrag migas.
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
# Idris/Jsonnet-style '|||' prefix is repeated per line.
it "properly reflows ||| comments ", ->
text =
'''
||| Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.
'''
res =
'''
||| Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard
||| sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical
||| fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest
||| quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro
||| actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia
||| sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher
||| direct trade, tacos pickled fanny pack literally meh pinterest slow-carb.
||| Meditation microdosing distillery 8-bit humblebrag migas.
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
# Lisp-style ';;' prefix is repeated per line.
it 'properly reflows ;; comments ', ->
text =
'''
;; Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.
'''
res =
'''
;; Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard
;; sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical
;; fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest
;; quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro
;; actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia
;; sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher
;; direct trade, tacos pickled fanny pack literally meh pinterest slow-carb.
;; Meditation microdosing distillery 8-bit humblebrag migas.
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
# A lone ';' (here ';!') is NOT a recognized comment prefix: the leading token
# stays on the first line only and continuation lines carry no prefix.
it 'does not treat lines starting with a single semicolon as ;; comments', ->
text =
'''
;! Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.
'''
res =
'''
;! Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard
sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical
fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa
leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually
aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial
letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade,
tacos pickled fanny pack literally meh pinterest slow-carb. Meditation
microdosing distillery 8-bit humblebrag migas.
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
# Email quote marker '>' is repeated on every wrapped line.
it 'properly reflows > ascii email inclusions ', ->
text =
'''
> Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.
'''
res =
'''
> Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha
> banh mi, cold-pressed retro whatever ethical man braid asymmetrical
> fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa
> leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually
> aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial
> letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade,
> tacos pickled fanny pack literally meh pinterest slow-carb. Meditation
> microdosing distillery 8-bit humblebrag migas.
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
# Tokens made of punctuation/math symbols ('%', '$...$', '\\{...\\}') count
# toward line width like ordinary words, so no output line exceeds wrapColumn.
# NOTE(review): the local is named 'test' (not 'text') in this spec only.
it "doesn't allow special characters to surpass wrapColumn", ->
test =
'''
Imagine that I'm writing some LaTeX code. I start a comment, but change my mind. %
Now I'm just kind of trucking along, doing some math and stuff. For instance, $3 + 4 = 7$. But maybe I'm getting really crazy and I use subtraction. It's kind of an obscure technique, but often it goes a bit like this: let $x = 2 + 2$, so $x - 1 = 3$ (quick maths).
That's OK I guess, but now look at this cool thing called set theory: $\\{n + 42 : n \\in \\mathbb{N}\\}$. Wow. Neat. But we all know why we're really here. If you peer deep down into your heart, and you stare into the depths of yourself: is $P = NP$? Beware, though; many have tried and failed to answer this question. It is by no means for the faint of heart.
'''
res =
'''
Imagine that I'm writing some LaTeX code. I start a comment, but change my mind.
%
Now I'm just kind of trucking along, doing some math and stuff. For instance, $3
+ 4 = 7$. But maybe I'm getting really crazy and I use subtraction. It's kind of
an obscure technique, but often it goes a bit like this: let $x = 2 + 2$, so $x
- 1 = 3$ (quick maths).
That's OK I guess, but now look at this cool thing called set theory: $\\{n + 42
: n \\in \\mathbb{N}\\}$. Wow. Neat. But we all know why we're really here. If you
peer deep down into your heart, and you stare into the depths of yourself: is $P
= NP$? Beware, though; many have tried and failed to answer this question. It is
by no means for the faint of heart.
'''
expect(autoflow.reflow(test, wrapColumn: 80)).toEqual res
# Specs for LaTeX-aware reflow: \begin/\end and brace-delimited environment
# lines act as paragraph boundaries and are never merged into the prose.
describe 'LaTeX', ->
# Prose between \begin{verbatim}/\end{verbatim} is wrapped; the tag lines
# themselves are left untouched on their own lines.
it 'properly reflows text around LaTeX tags', ->
text =
'''
\\begin{verbatim}
Lorem ipsum dolor sit amet, nisl odio amet, et tempor netus neque at at blandit, vel vestibulum libero dolor, semper lobortis ligula praesent. Eget condimentum integer, porta sagittis nam, fusce vitae a vitae augue. Nec semper quis sed ut, est porttitor praesent. Nisl velit quam dolore velit quam, elementum neque pellentesque pulvinar et vestibulum.
\\end{verbatim}
'''
res =
'''
\\begin{verbatim}
Lorem ipsum dolor sit amet, nisl odio amet, et tempor netus neque at at
blandit, vel vestibulum libero dolor, semper lobortis ligula praesent. Eget
condimentum integer, porta sagittis nam, fusce vitae a vitae augue. Nec
semper quis sed ut, est porttitor praesent. Nisl velit quam dolore velit
quam, elementum neque pellentesque pulvinar et vestibulum.
\\end{verbatim}
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
# Same for brace-delimited tags: \item{ and the closing } frame the wrapped
# body without being reflowed themselves.
it 'properly reflows text inside LaTeX tags', ->
text =
'''
\\item{
Lorem ipsum dolor sit amet, nisl odio amet, et tempor netus neque at at blandit, vel vestibulum libero dolor, semper lobortis ligula praesent. Eget condimentum integer, porta sagittis nam, fusce vitae a vitae augue. Nec semper quis sed ut, est porttitor praesent. Nisl velit quam dolore velit quam, elementum neque pellentesque pulvinar et vestibulum.
}
'''
res =
'''
\\item{
Lorem ipsum dolor sit amet, nisl odio amet, et tempor netus neque at at
blandit, vel vestibulum libero dolor, semper lobortis ligula praesent. Eget
condimentum integer, porta sagittis nam, fusce vitae a vitae augue. Nec
semper quis sed ut, est porttitor praesent. Nisl velit quam dolore velit
quam, elementum neque pellentesque pulvinar et vestibulum.
}
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
# Nested environments: both levels of tag lines are preserved verbatim.
it 'properly reflows text inside nested LaTeX tags', ->
text =
'''
\\begin{enumerate}[label=(\\alph*)]
\\item{
Lorem ipsum dolor sit amet, nisl odio amet, et tempor netus neque at at blandit, vel vestibulum libero dolor, semper lobortis ligula praesent. Eget condimentum integer, porta sagittis nam, fusce vitae a vitae augue. Nec semper quis sed ut, est porttitor praesent. Nisl velit quam dolore velit quam, elementum neque pellentesque pulvinar et vestibulum.
}
\\end{enumerate}
'''
res =
'''
\\begin{enumerate}[label=(\\alph*)]
\\item{
Lorem ipsum dolor sit amet, nisl odio amet, et tempor netus neque at at
blandit, vel vestibulum libero dolor, semper lobortis ligula praesent.
Eget condimentum integer, porta sagittis nam, fusce vitae a vitae augue.
Nec semper quis sed ut, est porttitor praesent. Nisl velit quam dolore
velit quam, elementum neque pellentesque pulvinar et vestibulum.
}
\\end{enumerate}
'''
expect(autoflow.reflow(text, wrapColumn: 80)).toEqual res
# A selection of tag lines with no prose (even below wrapColumn 5) is a
# no-op: nothing to wrap, input returned unchanged.
it 'does not attempt to reflow a selection that contains only LaTeX tags and nothing else', ->
text =
'''
\\begin{enumerate}
\\end{enumerate}
'''
expect(autoflow.reflow(text, wrapColumn: 5)).toEqual text

View File

@ -0,0 +1,683 @@
describe("Autoflow package", () => {
let [autoflow, editor, editorElement] = [];
const tabLength = 4;
// End-to-end specs for the `autoflow:reflow-selection` command driven through
// a real Atom editor (decaffeinated JS version of the CoffeeScript spec).
describe("autoflow:reflow-selection", () => {
beforeEach(() => {
let activationPromise = null;
// Open an empty editor, pin line length to 30 and tab length, then
// dispatch the command once so the (deferred) package activation starts.
waitsForPromise(() => atom.workspace.open());
runs(() => {
editor = atom.workspace.getActiveTextEditor();
editorElement = atom.views.getView(editor);
atom.config.set('editor.preferredLineLength', 30);
atom.config.set('editor.tabLength', tabLength);
activationPromise = atom.packages.activatePackage('autoflow');
atom.commands.dispatch(editorElement, 'autoflow:reflow-selection');
});
waitsForPromise(() => activationPromise);
});
// A scope-specific preferredLineLength (here 4 for null-grammar plain text)
// overrides the global 30 set in beforeEach.
it("uses the preferred line length based on the editor's scope", () => {
atom.config.set('editor.preferredLineLength', 4, {scopeSelector: '.text.plain.null-grammar'});
editor.setText("foo bar");
editor.selectAll();
atom.commands.dispatch(editorElement, 'autoflow:reflow-selection');
expect(editor.getText()).toBe(`\
foo
bar\
`
);
});
// Hard tabs count as tabLength columns when wrapping; tabs are swapped for
// 'X' runs before asserting so the expectation is whitespace-unambiguous.
it("rearranges line breaks in the current selection to ensure lines are shorter than config.editor.preferredLineLength honoring tabLength", () => {
editor.setText("\t\tThis is the first paragraph and it is longer than the preferred line length so it should be reflowed.\n\n\t\tThis is a short paragraph.\n\n\t\tAnother long paragraph, it should also be reflowed with the use of this single command.");
editor.selectAll();
atom.commands.dispatch(editorElement, 'autoflow:reflow-selection');
const exedOut = editor.getText().replace(/\t/g, Array(tabLength+1).join('X'));
expect(exedOut).toBe("XXXXXXXXThis is the first\nXXXXXXXXparagraph and it is\nXXXXXXXXlonger than the\nXXXXXXXXpreferred line length\nXXXXXXXXso it should be\nXXXXXXXXreflowed.\n\nXXXXXXXXThis is a short\nXXXXXXXXparagraph.\n\nXXXXXXXXAnother long\nXXXXXXXXparagraph, it should\nXXXXXXXXalso be reflowed with\nXXXXXXXXthe use of this single\nXXXXXXXXcommand.");
});
// Plain multi-paragraph selection is wrapped at the configured 30 columns,
// one paragraph at a time.
it("rearranges line breaks in the current selection to ensure lines are shorter than config.editor.preferredLineLength", () => {
editor.setText(`\
This is the first paragraph and it is longer than the preferred line length so it should be reflowed.
This is a short paragraph.
Another long paragraph, it should also be reflowed with the use of this single command.\
`
);
editor.selectAll();
atom.commands.dispatch(editorElement, 'autoflow:reflow-selection');
expect(editor.getText()).toBe(`\
This is the first paragraph
and it is longer than the
preferred line length so it
should be reflowed.
This is a short paragraph.
Another long paragraph, it
should also be reflowed with
the use of this single
command.\
`
);
});
// Selection boundaries that sit exactly on paragraph edges must not swallow
// the bounding newlines into the reflowed text.
it("is not confused when the selection boundary is between paragraphs", () => {
editor.setText(`\
v--- SELECTION STARTS AT THE BEGINNING OF THE NEXT LINE (pos 1,0)
The preceding newline should not be considered part of this paragraph.
The newline at the end of this paragraph should be preserved and not
converted into a space.
^--- SELECTION ENDS AT THE BEGINNING OF THE PREVIOUS LINE (pos 6,0)\
`
);
editor.setCursorBufferPosition([1, 0]);
editor.selectToBufferPosition([6, 0]);
atom.commands.dispatch(editorElement, 'autoflow:reflow-selection');
expect(editor.getText()).toBe(`\
v--- SELECTION STARTS AT THE BEGINNING OF THE NEXT LINE (pos 1,0)
The preceding newline should
not be considered part of this
paragraph.
The newline at the end of this
paragraph should be preserved
and not converted into a
space.
^--- SELECTION ENDS AT THE BEGINNING OF THE PREVIOUS LINE (pos 6,0)\
`
);
});
// With an empty selection, only the paragraph under the cursor is reflowed;
// neighbouring paragraphs are untouched.
it("reflows the current paragraph if nothing is selected", () => {
editor.setText(`\
This is a preceding paragraph, which shouldn't be modified by a reflow of the following paragraph.
The quick brown fox jumps over the lazy
dog. The preceding sentence contains every letter
in the entire English alphabet, which has absolutely no relevance
to this test.
This is a following paragraph, which shouldn't be modified by a reflow of the preciding paragraph.
\
`
);
editor.setCursorBufferPosition([3, 5]);
atom.commands.dispatch(editorElement, 'autoflow:reflow-selection');
expect(editor.getText()).toBe(`\
This is a preceding paragraph, which shouldn't be modified by a reflow of the following paragraph.
The quick brown fox jumps over
the lazy dog. The preceding
sentence contains every letter
in the entire English
alphabet, which has absolutely
no relevance to this test.
This is a following paragraph, which shouldn't be modified by a reflow of the preciding paragraph.
\
`
);
});
// A single token longer than the wrap column is kept intact on its own line
// rather than being broken mid-word.
it("allows for single words that exceed the preferred wrap column length", () => {
editor.setText("this-is-a-super-long-word-that-shouldn't-break-autoflow and these are some smaller words");
editor.selectAll();
atom.commands.dispatch(editorElement, 'autoflow:reflow-selection');
expect(editor.getText()).toBe(`\
this-is-a-super-long-word-that-shouldn't-break-autoflow
and these are some smaller
words\
`
);
});
});
describe("reflowing text", () => {
beforeEach(() => autoflow = require("../lib/autoflow"));
// Blank-line-separated paragraphs are reflowed independently; template
// literals use a trailing '\' to suppress the first/last newline.
it('respects current paragraphs', () => {
const text = `\
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida nibh id magna ullamcorper sagittis. Maecenas
et enim eu orci tincidunt adipiscing
aliquam ligula.
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
Phasellus gravida
nibh id magna ullamcorper
tincidunt adipiscing lacinia a dui. Etiam quis erat dolor.
rutrum nisl fermentum rhoncus. Duis blandit ligula facilisis fermentum.\
`;
const res = `\
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida nibh
id magna ullamcorper sagittis. Maecenas et enim eu orci tincidunt adipiscing
aliquam ligula.
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida nibh
id magna ullamcorper tincidunt adipiscing lacinia a dui. Etiam quis erat dolor.
rutrum nisl fermentum rhoncus. Duis blandit ligula facilisis fermentum.\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// More-indented continuation lines set the hanging indent of the reflowed
// paragraph (JS port of the CoffeeScript spec of the same name).
it('respects indentation', () => {
const text = `\
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida nibh id magna ullamcorper sagittis. Maecenas
et enim eu orci tincidunt adipiscing
aliquam ligula.
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
Phasellus gravida
nibh id magna ullamcorper
tincidunt adipiscing lacinia a dui. Etiam quis erat dolor.
rutrum nisl fermentum rhoncus. Duis blandit ligula facilisis fermentum\
`;
const res = `\
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida nibh
id magna ullamcorper sagittis. Maecenas et enim eu orci tincidunt adipiscing
aliquam ligula.
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida
nibh id magna ullamcorper tincidunt adipiscing lacinia a dui. Etiam quis
erat dolor. rutrum nisl fermentum rhoncus. Duis blandit ligula facilisis
fermentum\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// The '#' prefix of the second paragraph is repeated on each wrapped line;
// the unprefixed paragraph is wrapped without it.
it('respects prefixed text (comments!)', () => {
const text = `\
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida nibh id magna ullamcorper sagittis. Maecenas
et enim eu orci tincidunt adipiscing
aliquam ligula.
# Lorem ipsum dolor sit amet, consectetur adipiscing elit.
# Phasellus gravida
# nibh id magna ullamcorper
# tincidunt adipiscing lacinia a dui. Etiam quis erat dolor.
# rutrum nisl fermentum rhoncus. Duis blandit ligula facilisis fermentum\
`;
const res = `\
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida nibh
id magna ullamcorper sagittis. Maecenas et enim eu orci tincidunt adipiscing
aliquam ligula.
# Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida
# nibh id magna ullamcorper tincidunt adipiscing lacinia a dui. Etiam quis
# erat dolor. rutrum nisl fermentum rhoncus. Duis blandit ligula facilisis
# fermentum\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// Multi-character '//' prefix repeated on every wrapped line.
it('respects multiple prefixes (js/c comments)', () => {
const text = `\
// Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida
et enim eu orci tincidunt adipiscing
aliquam ligula.\
`;
const res = `\
// Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida et
// enim eu orci tincidunt adipiscing aliquam ligula.\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// '*' doc-comment body prefix is repeated on wrapped lines, and a later
// '*'-prefixed line starts a new paragraph.
it('properly handles * prefix', () => {
const text = `\
* Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida
et enim eu orci tincidunt adipiscing
aliquam ligula.
* soidjfiojsoidj foi\
`;
const res = `\
* Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus gravida et
* enim eu orci tincidunt adipiscing aliquam ligula.
* soidjfiojsoidj foi\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// Regression: a '***' prefix must not produce an invalid regex when the
// prefix pattern is built; input passes through unchanged.
it("does not throw invalid regular expression errors (regression)", () => {
const text = `\
*** Lorem ipsum dolor sit amet\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(text);
});
// Lines of one paragraph with differing leading indents are merged and
// rewrapped as a single paragraph.
it('handles different initial indentation', () => {
const text = `\
Magna ea magna fugiat nisi minim in id duis. Culpa sit sint consequat quis elit magna pariatur incididunt
proident laborum deserunt est aliqua reprehenderit. Occaecat et ex non do Lorem irure adipisicing mollit excepteur
eu ullamco consectetur. Ex ex Lorem duis labore quis ad exercitation elit dolor non adipisicing. Pariatur commodo ullamco
culpa dolor sunt enim. Ullamco dolore do ea nulla ut commodo minim consequat cillum ad velit quis.\
`;
const res = `\
Magna ea magna fugiat nisi minim in id duis. Culpa sit sint consequat quis elit
magna pariatur incididunt proident laborum deserunt est aliqua reprehenderit.
Occaecat et ex non do Lorem irure adipisicing mollit excepteur eu ullamco
consectetur. Ex ex Lorem duis labore quis ad exercitation elit dolor non
adipisicing. Pariatur commodo ullamco culpa dolor sunt enim. Ullamco dolore do
ea nulla ut commodo minim consequat cillum ad velit quis.\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// Mixed \r\n and bare \r endings are all treated as breaks; output uses \n.
it('properly handles CRLF', () => {
const text = "This is the first line and it is longer than the preferred line length so it should be reflowed.\r\nThis is a short line which should\r\nbe reflowed with the following line.\rAnother long line, it should also be reflowed with everything above it when it is all reflowed.";
const res =
`\
This is the first line and it is longer than the preferred line length so it
should be reflowed. This is a short line which should be reflowed with the
following line. Another long line, it should also be reflowed with everything
above it when it is all reflowed.\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// Cyrillic words are measured and wrapped like ASCII words.
it('handles cyrillic text', () => {
const text = `\
В начале июля, в чрезвычайно жаркое время, под вечер, один молодой человек вышел из своей каморки, которую нанимал от жильцов в С-м переулке, на улицу и медленно, как бы в нерешимости, отправился к К-ну мосту.\
`;
const res = `\
В начале июля, в чрезвычайно жаркое время, под вечер, один молодой человек вышел
из своей каморки, которую нанимал от жильцов в С-м переулке, на улицу и
медленно, как бы в нерешимости, отправился к К-ну мосту.\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
it('handles `yo` character properly', () => {
// Because there are known problems with this character in major regex
// engines (Ё falls outside \w without full Unicode support), wrap at
// column 2 so each one-character word gets its own line.
const text = 'Ё Ё Ё';
const res = `\
Ё
Ё
Ё\
`;
expect(autoflow.reflow(text, {wrapColumn: 2})).toEqual(res);
});
// NOTE(review): each spec below feeds a single over-long prefixed line to
// autoflow.reflow() and asserts the prefix is repeated (or, for list items
// and email quotes, indented/repeated) on every wrapped line. Fixture
// template literals are flush-left on purpose — do not re-indent.
// C++/JS style "// " line comments keep the prefix on every wrapped line.
it('properly reflows // comments ', () => {
const text =
`\
// Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.\
`;
const res =
`\
// Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard
// sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical
// fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest
// quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro
// actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia
// sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher
// direct trade, tacos pickled fanny pack literally meh pinterest slow-carb.
// Meditation microdosing distillery 8-bit humblebrag migas.\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// "/* ... */" block comments: continuation lines are indented to the text
// column rather than repeating the opener.
it('properly reflows /* comments ', () => {
const text =
`\
/* Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas. */\
`;
const res =
`\
/* Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard
sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical
fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest
quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro
actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia
sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher
direct trade, tacos pickled fanny pack literally meh pinterest slow-carb.
Meditation microdosing distillery 8-bit humblebrag migas. */\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// Shell/Python style "# " comments keep the prefix on every wrapped line.
it('properly reflows pound comments ', () => {
const text =
`\
# Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.\
`;
const res =
`\
# Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha
# banh mi, cold-pressed retro whatever ethical man braid asymmetrical
# fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa
# leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually
# aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial
# letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade,
# tacos pickled fanny pack literally meh pinterest slow-carb. Meditation
# microdosing distillery 8-bit humblebrag migas.\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// "- " list items: continuation lines are indented under the item text, the
// dash itself is not repeated.
it('properly reflows - list items ', () => {
const text =
`\
- Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.\
`;
const res =
`\
- Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha
banh mi, cold-pressed retro whatever ethical man braid asymmetrical
fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa
leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually
aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial
letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade,
tacos pickled fanny pack literally meh pinterest slow-carb. Meditation
microdosing distillery 8-bit humblebrag migas.\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// LaTeX/Erlang style "% " comments keep the prefix on every wrapped line.
it('properly reflows % comments ', () => {
const text =
`\
% Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.\
`;
const res =
`\
% Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha
% banh mi, cold-pressed retro whatever ethical man braid asymmetrical
% fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa
% leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually
% aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial
% letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade,
% tacos pickled fanny pack literally meh pinterest slow-carb. Meditation
% microdosing distillery 8-bit humblebrag migas.\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// R roxygen "#' " comments keep the two-character prefix on every wrapped line.
it("properly reflows roxygen comments ", () => {
const text =
`\
#' Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.\
`;
const res =
`\
#' Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard
#' sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical
#' fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest
#' quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro
#' actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia
#' sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher
#' direct trade, tacos pickled fanny pack literally meh pinterest slow-carb.
#' Meditation microdosing distillery 8-bit humblebrag migas.\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// SQL/Haskell/Lua style "-- " comments keep the prefix on every wrapped line.
it("properly reflows -- comments ", () => {
const text =
`\
-- Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.\
`;
const res =
`\
-- Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard
-- sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical
-- fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest
-- quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro
-- actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia
-- sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher
-- direct trade, tacos pickled fanny pack literally meh pinterest slow-carb.
-- Meditation microdosing distillery 8-bit humblebrag migas.\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// "||| " comments (e.g. Idris) keep the three-character prefix on every line.
it("properly reflows ||| comments ", () => {
const text =
`\
||| Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.\
`;
const res =
`\
||| Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard
||| sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical
||| fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest
||| quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro
||| actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia
||| sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher
||| direct trade, tacos pickled fanny pack literally meh pinterest slow-carb.
||| Meditation microdosing distillery 8-bit humblebrag migas.\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// Lisp style ";; " comments keep the prefix on every wrapped line.
it('properly reflows ;; comments ', () => {
const text =
`\
;; Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.\
`;
const res =
`\
;; Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard
;; sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical
;; fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest
;; quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro
;; actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia
;; sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher
;; direct trade, tacos pickled fanny pack literally meh pinterest slow-carb.
;; Meditation microdosing distillery 8-bit humblebrag migas.\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// A lone ";" followed by a non-";" character is not a ";;" comment prefix:
// wrapped lines are NOT prefixed.
it('does not treat lines starting with a single semicolon as ;; comments', () => {
const text =
`\
;! Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.\
`;
const res =
`\
;! Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard
sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical
fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa
leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually
aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial
letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade,
tacos pickled fanny pack literally meh pinterest slow-carb. Meditation
microdosing distillery 8-bit humblebrag migas.\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// Email quoting "> " prefixes are repeated on every wrapped line.
it('properly reflows > ascii email inclusions ', () => {
const text =
`\
> Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha banh mi, cold-pressed retro whatever ethical man braid asymmetrical fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade, tacos pickled fanny pack literally meh pinterest slow-carb. Meditation microdosing distillery 8-bit humblebrag migas.\
`;
const res =
`\
> Beard pinterest actually brunch brooklyn jean shorts YOLO. Knausgaard sriracha
> banh mi, cold-pressed retro whatever ethical man braid asymmetrical
> fingerstache narwhal. Intelligentsia wolf photo booth, tumblr pinterest quinoa
> leggings four loko poutine. DIY tattooed drinking vinegar, wolf retro actually
> aesthetic austin keffiyeh marfa beard. Marfa trust fund salvia sartorial
> letterpress, keffiyeh plaid butcher. Swag try-hard dreamcatcher direct trade,
> tacos pickled fanny pack literally meh pinterest slow-carb. Meditation
> microdosing distillery 8-bit humblebrag migas.\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// Words made of "special" characters (%, $, math operators) count toward the
// wrap column like any other word; none may spill past wrapColumn.
it("doesn't allow special characters to surpass wrapColumn", () => {
const test =
`\
Imagine that I'm writing some LaTeX code. I start a comment, but change my mind. %
Now I'm just kind of trucking along, doing some math and stuff. For instance, $3 + 4 = 7$. But maybe I'm getting really crazy and I use subtraction. It's kind of an obscure technique, but often it goes a bit like this: let $x = 2 + 2$, so $x - 1 = 3$ (quick maths).
That's OK I guess, but now look at this cool thing called set theory: $\\{n + 42 : n \\in \\mathbb{N}\\}$. Wow. Neat. But we all know why we're really here. If you peer deep down into your heart, and you stare into the depths of yourself: is $P = NP$? Beware, though; many have tried and failed to answer this question. It is by no means for the faint of heart.\
`;
const res =
`\
Imagine that I'm writing some LaTeX code. I start a comment, but change my mind.
%
Now I'm just kind of trucking along, doing some math and stuff. For instance, $3
+ 4 = 7$. But maybe I'm getting really crazy and I use subtraction. It's kind of
an obscure technique, but often it goes a bit like this: let $x = 2 + 2$, so $x
- 1 = 3$ (quick maths).
That's OK I guess, but now look at this cool thing called set theory: $\\{n + 42
: n \\in \\mathbb{N}\\}$. Wow. Neat. But we all know why we're really here. If you
peer deep down into your heart, and you stare into the depths of yourself: is $P
= NP$? Beware, though; many have tried and failed to answer this question. It is
by no means for the faint of heart.\
`;
expect(autoflow.reflow(test, {wrapColumn: 80})).toEqual(res);
});
// Reflow behavior around LaTeX environments: \begin/\end and brace-delimited
// tags act as boundaries that are preserved verbatim while the prose between
// them is rewrapped (indented to the tag's content level).
describe('LaTeX', () => {
// Text between \begin{verbatim}/\end{verbatim} is rewrapped; the tag lines
// themselves are left untouched.
it('properly reflows text around LaTeX tags', () => {
const text =
`\
\\begin{verbatim}
Lorem ipsum dolor sit amet, nisl odio amet, et tempor netus neque at at blandit, vel vestibulum libero dolor, semper lobortis ligula praesent. Eget condimentum integer, porta sagittis nam, fusce vitae a vitae augue. Nec semper quis sed ut, est porttitor praesent. Nisl velit quam dolore velit quam, elementum neque pellentesque pulvinar et vestibulum.
\\end{verbatim}\
`;
const res =
`\
\\begin{verbatim}
Lorem ipsum dolor sit amet, nisl odio amet, et tempor netus neque at at
blandit, vel vestibulum libero dolor, semper lobortis ligula praesent. Eget
condimentum integer, porta sagittis nam, fusce vitae a vitae augue. Nec
semper quis sed ut, est porttitor praesent. Nisl velit quam dolore velit
quam, elementum neque pellentesque pulvinar et vestibulum.
\\end{verbatim}\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// Same for a brace-delimited tag like \item{ ... }.
it('properly reflows text inside LaTeX tags', () => {
const text =
`\
\\item{
Lorem ipsum dolor sit amet, nisl odio amet, et tempor netus neque at at blandit, vel vestibulum libero dolor, semper lobortis ligula praesent. Eget condimentum integer, porta sagittis nam, fusce vitae a vitae augue. Nec semper quis sed ut, est porttitor praesent. Nisl velit quam dolore velit quam, elementum neque pellentesque pulvinar et vestibulum.
}\
`;
const res =
`\
\\item{
Lorem ipsum dolor sit amet, nisl odio amet, et tempor netus neque at at
blandit, vel vestibulum libero dolor, semper lobortis ligula praesent. Eget
condimentum integer, porta sagittis nam, fusce vitae a vitae augue. Nec
semper quis sed ut, est porttitor praesent. Nisl velit quam dolore velit
quam, elementum neque pellentesque pulvinar et vestibulum.
}\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// Nested environments: each nesting level adds another indent step to the
// rewrapped prose.
it('properly reflows text inside nested LaTeX tags', () => {
const text =
`\
\\begin{enumerate}[label=(\\alph*)]
\\item{
Lorem ipsum dolor sit amet, nisl odio amet, et tempor netus neque at at blandit, vel vestibulum libero dolor, semper lobortis ligula praesent. Eget condimentum integer, porta sagittis nam, fusce vitae a vitae augue. Nec semper quis sed ut, est porttitor praesent. Nisl velit quam dolore velit quam, elementum neque pellentesque pulvinar et vestibulum.
}
\\end{enumerate}\
`;
const res =
`\
\\begin{enumerate}[label=(\\alph*)]
\\item{
Lorem ipsum dolor sit amet, nisl odio amet, et tempor netus neque at at
blandit, vel vestibulum libero dolor, semper lobortis ligula praesent.
Eget condimentum integer, porta sagittis nam, fusce vitae a vitae augue.
Nec semper quis sed ut, est porttitor praesent. Nisl velit quam dolore
velit quam, elementum neque pellentesque pulvinar et vestibulum.
}
\\end{enumerate}\
`;
expect(autoflow.reflow(text, {wrapColumn: 80})).toEqual(res);
});
// A selection containing only tags (no prose) is returned unchanged even at
// a tiny wrapColumn.
it('does not attempt to reflow a selection that contains only LaTeX tags and nothing else', () => {
const text =
`\
\\begin{enumerate}
\\end{enumerate}\
`;
expect(autoflow.reflow(text, {wrapColumn: 5})).toEqual(text);
});
});
});
});

View File

@ -1,36 +0,0 @@
# NOTE(review): this CoffeeScript spec appears with its indentation stripped
# in this dump; CoffeeScript is whitespace-significant, so the nesting below
# cannot be read off the text itself — confirm against the original file.
DeprecationCopView = require '../lib/deprecation-cop-view'
describe "DeprecationCop", ->
[activationPromise, workspaceElement] = []
beforeEach ->
workspaceElement = atom.views.getView(atom.workspace)
activationPromise = atom.packages.activatePackage('deprecation-cop')
# Sanity check: no pane item is open before the command fires.
expect(atom.workspace.getActivePane().getActiveItem()).not.toExist()
# Dispatching deprecation-cop:view should open the DeprecationCopView pane item.
describe "when the deprecation-cop:view event is triggered", ->
it "displays the deprecation cop pane", ->
atom.commands.dispatch workspaceElement, 'deprecation-cop:view'
waitsForPromise ->
activationPromise
deprecationCopView = null
waitsFor ->
deprecationCopView = atom.workspace.getActivePane().getActiveItem()
runs ->
expect(deprecationCopView instanceof DeprecationCopView).toBeTruthy()
# Deactivating the package should remove the pane item again.
describe "deactivating the package", ->
it "removes the deprecation cop pane item", ->
atom.commands.dispatch workspaceElement, 'deprecation-cop:view'
waitsForPromise ->
activationPromise
waitsForPromise ->
Promise.resolve(atom.packages.deactivatePackage('deprecation-cop')) # Wrapped for Promise & non-Promise deactivate
runs ->
expect(atom.workspace.getActivePane().getActiveItem()).not.toExist()

View File

@ -0,0 +1,33 @@
const DeprecationCopView = require('../lib/deprecation-cop-view');

// Specs for the deprecation-cop package: opening its pane via the
// deprecation-cop:view command, and removing the pane item on deactivation.
describe("DeprecationCop", () => {
  let activationPromise = null;
  let workspaceElement = null;

  beforeEach(() => {
    workspaceElement = atom.views.getView(atom.workspace);
    activationPromise = atom.packages.activatePackage('deprecation-cop');
    // Sanity check: no pane item is open before the command fires.
    expect(atom.workspace.getActivePane().getActiveItem()).not.toExist();
  });

  describe("when the deprecation-cop:view event is triggered", () => {
    it("displays the deprecation cop pane", () => {
      atom.commands.dispatch(workspaceElement, 'deprecation-cop:view');
      waitsForPromise(() => activationPromise);
      let activePaneItem = null;
      // Poll until the command has produced an active pane item.
      waitsFor(() => {
        activePaneItem = atom.workspace.getActivePane().getActiveItem();
        return activePaneItem;
      });
      runs(() => {
        expect(activePaneItem instanceof DeprecationCopView).toBeTruthy();
      });
    });
  });

  describe("deactivating the package", () => {
    it("removes the deprecation cop pane item", () => {
      atom.commands.dispatch(workspaceElement, 'deprecation-cop:view');
      waitsForPromise(() => activationPromise);
      // Wrapped for Promise & non-Promise deactivate
      waitsForPromise(() => Promise.resolve(atom.packages.deactivatePackage('deprecation-cop')));
      runs(() => {
        expect(atom.workspace.getActivePane().getActiveItem()).not.toExist();
      });
    });
  });
});

View File

@ -1,72 +0,0 @@
# NOTE(review): this CoffeeScript spec appears with its indentation stripped
# in this dump; CoffeeScript is whitespace-significant, so the nesting below
# cannot be read off the text itself — confirm against the original file.
path = require 'path'
Grim = require 'grim'
DeprecationCopView = require '../lib/deprecation-cop-view'
_ = require 'underscore-plus'
# Specs for the status-bar indicator that counts deprecation warnings.
describe "DeprecationCopStatusBarView", ->
[deprecatedMethod, statusBarView, workspaceElement] = []
beforeEach ->
# jasmine.Clock.useMock() cannot mock _.debounce
# http://stackoverflow.com/questions/13707047/spec-for-async-functions-using-jasmine
# The fake makes the debounced function run synchronously with the same args.
spyOn(_, 'debounce').andCallFake (func) ->
-> func.apply(this, arguments)
jasmine.snapshotDeprecations()
workspaceElement = atom.views.getView(atom.workspace)
jasmine.attachToDOM(workspaceElement)
waitsForPromise -> atom.packages.activatePackage('status-bar')
waitsForPromise -> atom.packages.activatePackage('deprecation-cop')
waitsFor ->
statusBarView = workspaceElement.querySelector('.deprecation-cop-status')
afterEach ->
jasmine.restoreDeprecationsSnapshot()
it "adds the status bar view when activated", ->
expect(statusBarView).toExist()
expect(statusBarView.textContent).toBe '0 deprecations'
expect(statusBarView).not.toShow()
# Each Grim.deprecate call bumps the visible counter text.
it "increments when there are deprecated methods", ->
deprecatedMethod = -> Grim.deprecate("This isn't used")
anotherDeprecatedMethod = -> Grim.deprecate("This either")
expect(statusBarView.style.display).toBe 'none'
expect(statusBarView.offsetHeight).toBe(0)
deprecatedMethod()
expect(statusBarView.textContent).toBe '1 deprecation'
expect(statusBarView.offsetHeight).toBeGreaterThan(0)
deprecatedMethod()
expect(statusBarView.textContent).toBe '2 deprecations'
expect(statusBarView.offsetHeight).toBeGreaterThan(0)
anotherDeprecatedMethod()
expect(statusBarView.textContent).toBe '3 deprecations'
expect(statusBarView.offsetHeight).toBeGreaterThan(0)
# TODO: Remove conditional when the new StyleManager deprecation APIs reach stable.
if atom.styles.getDeprecations?
it "increments when there are deprecated selectors", ->
atom.styles.addStyleSheet("""
atom-text-editor::shadow { color: red; }
""", sourcePath: 'file-1')
expect(statusBarView.textContent).toBe '1 deprecation'
expect(statusBarView).toBeVisible()
atom.styles.addStyleSheet("""
atom-text-editor::shadow { color: blue; }
""", sourcePath: 'file-2')
expect(statusBarView.textContent).toBe '2 deprecations'
expect(statusBarView).toBeVisible()
it 'opens deprecation cop tab when clicked', ->
expect(atom.workspace.getActivePane().getActiveItem()).not.toExist()
waitsFor (done) ->
atom.workspace.onDidOpen ({item}) ->
expect(item instanceof DeprecationCopView).toBe true
done()
statusBarView.click()

View File

@ -0,0 +1,79 @@
const path = require('path');
const Grim = require('grim');
const DeprecationCopView = require('../lib/deprecation-cop-view');
const _ = require('underscore-plus');

// Specs for the status-bar indicator that counts deprecation warnings.
describe("DeprecationCopStatusBarView", () => {
  let [deprecatedMethod, statusBarView, workspaceElement] = [];

  beforeEach(() => {
    // jasmine.Clock.useMock() cannot mock _.debounce
    // http://stackoverflow.com/questions/13707047/spec-for-async-functions-using-jasmine
    // Bug fix: the fake previously returned an arrow function, but arrow
    // functions have no `arguments` binding of their own — `arguments`
    // resolved lexically (up through the surrounding arrows to the CommonJS
    // module wrapper), so the wrong values were forwarded to `func`. A
    // regular function restores the CoffeeScript original's semantics:
    // dynamic `this` plus its own `arguments`.
    spyOn(_, 'debounce').andCallFake(func => function() { return func.apply(this, arguments); });
    jasmine.snapshotDeprecations();
    workspaceElement = atom.views.getView(atom.workspace);
    jasmine.attachToDOM(workspaceElement);
    waitsForPromise(() => atom.packages.activatePackage('status-bar'));
    waitsForPromise(() => atom.packages.activatePackage('deprecation-cop'));
    // Poll until the status-bar tile has been attached.
    waitsFor(() => statusBarView = workspaceElement.querySelector('.deprecation-cop-status'));
  });

  afterEach(() => jasmine.restoreDeprecationsSnapshot());

  it("adds the status bar view when activated", () => {
    expect(statusBarView).toExist();
    expect(statusBarView.textContent).toBe('0 deprecations');
    // The tile exists but stays hidden while the count is zero.
    expect(statusBarView).not.toShow();
  });

  // Each Grim.deprecate call bumps the visible counter text and reveals the tile.
  it("increments when there are deprecated methods", () => {
    deprecatedMethod = () => Grim.deprecate("This isn't used");
    const anotherDeprecatedMethod = () => Grim.deprecate("This either");
    expect(statusBarView.style.display).toBe('none');
    expect(statusBarView.offsetHeight).toBe(0);

    deprecatedMethod();
    expect(statusBarView.textContent).toBe('1 deprecation');
    expect(statusBarView.offsetHeight).toBeGreaterThan(0);

    deprecatedMethod();
    expect(statusBarView.textContent).toBe('2 deprecations');
    expect(statusBarView.offsetHeight).toBeGreaterThan(0);

    anotherDeprecatedMethod();
    expect(statusBarView.textContent).toBe('3 deprecations');
    expect(statusBarView.offsetHeight).toBeGreaterThan(0);
  });

  // TODO: Remove conditional when the new StyleManager deprecation APIs reach stable.
  if (atom.styles.getDeprecations != null) {
    it("increments when there are deprecated selectors", () => {
      // Template literals are flush-left: their content is the stylesheet text.
      atom.styles.addStyleSheet(`\
atom-text-editor::shadow { color: red; }\
`, {sourcePath: 'file-1'});
      expect(statusBarView.textContent).toBe('1 deprecation');
      expect(statusBarView).toBeVisible();

      atom.styles.addStyleSheet(`\
atom-text-editor::shadow { color: blue; }\
`, {sourcePath: 'file-2'});
      expect(statusBarView.textContent).toBe('2 deprecations');
      expect(statusBarView).toBeVisible();
    });
  }

  it('opens deprecation cop tab when clicked', () => {
    expect(atom.workspace.getActivePane().getActiveItem()).not.toExist();
    waitsFor(function(done) {
      atom.workspace.onDidOpen(function({item}) {
        expect(item instanceof DeprecationCopView).toBe(true);
        done();
      });
      statusBarView.click();
    });
  });
});

View File

@ -1,93 +0,0 @@
# NOTE(review): this CoffeeScript spec appears with its indentation stripped
# in this dump; CoffeeScript is whitespace-significant, so the nesting below
# cannot be read off the text itself — confirm against the original file.
Grim = require 'grim'
path = require 'path'
_ = require 'underscore-plus'
etch = require 'etch'
# Specs for the DeprecationCopView pane item's rendered content.
describe "DeprecationCopView", ->
[deprecationCopView, workspaceElement] = []
beforeEach ->
# Make debounced functions run synchronously with the same args.
spyOn(_, 'debounce').andCallFake (func) ->
-> func.apply(this, arguments)
workspaceElement = atom.views.getView(atom.workspace)
jasmine.attachToDOM(workspaceElement)
jasmine.snapshotDeprecations()
Grim.clearDeprecations()
deprecatedMethod = -> Grim.deprecate("A test deprecation. This isn't used")
deprecatedMethod()
spyOn(Grim, 'deprecate') # Don't fail tests if when using deprecated APIs in deprecation cop's activation
activationPromise = atom.packages.activatePackage('deprecation-cop')
atom.commands.dispatch workspaceElement, 'deprecation-cop:view'
waitsForPromise ->
activationPromise
waitsFor -> deprecationCopView = atom.workspace.getActivePane().getActiveItem()
runs ->
jasmine.unspy(Grim, 'deprecate')
afterEach ->
jasmine.restoreDeprecationsSnapshot()
it "displays deprecated methods", ->
expect(deprecationCopView.element.textContent).toMatch /Deprecated calls/
expect(deprecationCopView.element.textContent).toMatch /This isn't used/
# TODO: Remove conditional when the new StyleManager deprecation APIs reach stable.
if atom.styles.getDeprecations?
# Deprecated selectors are grouped per package, with node_modules sources
# collected under "Other".
it "displays deprecated selectors", ->
atom.styles.addStyleSheet("atom-text-editor::shadow { color: red }", sourcePath: path.join('some-dir', 'packages', 'package-1', 'file-1.css'))
atom.styles.addStyleSheet("atom-text-editor::shadow { color: yellow }", context: 'atom-text-editor', sourcePath: path.join('some-dir', 'packages', 'package-1', 'file-2.css'))
atom.styles.addStyleSheet('atom-text-editor::shadow { color: blue }', sourcePath: path.join('another-dir', 'packages', 'package-2', 'file-3.css'))
atom.styles.addStyleSheet('atom-text-editor::shadow { color: gray }', sourcePath: path.join('another-dir', 'node_modules', 'package-3', 'file-4.css'))
promise = etch.getScheduler().getNextUpdatePromise()
waitsForPromise -> promise
runs ->
packageItems = deprecationCopView.element.querySelectorAll("ul.selectors > li")
expect(packageItems.length).toBe(3)
expect(packageItems[0].textContent).toMatch /package-1/
expect(packageItems[1].textContent).toMatch /package-2/
expect(packageItems[2].textContent).toMatch /Other/
packageDeprecationItems = packageItems[0].querySelectorAll("li.source-file")
expect(packageDeprecationItems.length).toBe(2)
expect(packageDeprecationItems[0].textContent).toMatch /atom-text-editor/
expect(packageDeprecationItems[0].querySelector("a").href).toMatch('some-dir/packages/package-1/file-1.css')
expect(packageDeprecationItems[1].textContent).toMatch /:host/
expect(packageDeprecationItems[1].querySelector("a").href).toMatch('some-dir/packages/package-1/file-2.css')
# Package attribution should skip stack frames that pass through node_modules/.
it 'skips stack entries which go through node_modules/ files when determining package name', ->
stack = [
{
"functionName": "function0"
"location": path.normalize "/Users/user/.atom/packages/package1/node_modules/atom-space-pen-viewslib/space-pen.js:55:66"
"fileName": path.normalize "/Users/user/.atom/packages/package1/node_modules/atom-space-pen-views/lib/space-pen.js"
}
{
"functionName": "function1"
"location": path.normalize "/Users/user/.atom/packages/package1/node_modules/atom-space-pen-viewslib/space-pen.js:15:16"
"fileName": path.normalize "/Users/user/.atom/packages/package1/node_modules/atom-space-pen-views/lib/space-pen.js"
}
{
"functionName": "function2"
"location": path.normalize "/Users/user/.atom/packages/package2/lib/module.js:13:14"
"fileName": path.normalize "/Users/user/.atom/packages/package2/lib/module.js"
}
]
packagePathsByPackageName = new Map([
['package1', path.normalize("/Users/user/.atom/packages/package1")],
['package2', path.normalize("/Users/user/.atom/packages/package2")]
])
spyOn(deprecationCopView, 'getPackagePathsByPackageName').andReturn(packagePathsByPackageName)
packageName = deprecationCopView.getPackageName(stack)
expect(packageName).toBe("package2")

View File

@ -0,0 +1,97 @@
const Grim = require('grim');
const path = require('path');
const _ = require('underscore-plus');
const etch = require('etch');
// Specs for the deprecation-cop package's main view. Written against Jasmine
// 1.x async primitives (waitsForPromise / waitsFor / runs), so the statement
// order in beforeEach and inside each `runs` callback is load-bearing — do
// not reorder.
describe("DeprecationCopView", () => {
  let [deprecationCopView, workspaceElement] = [];

  beforeEach(() => {
    // Make _.debounce synchronous so the view updates immediately in specs.
    // NOTE(review): inside an arrow function `this`/`arguments` resolve to the
    // enclosing module scope, not the spy invocation — presumably harmless
    // because the debounced callbacks take no arguments; confirm if reused.
    spyOn(_, 'debounce').andCallFake(func => (() => { return func.apply(this, arguments); }));
    workspaceElement = atom.views.getView(atom.workspace);
    jasmine.attachToDOM(workspaceElement);
    jasmine.snapshotDeprecations();
    Grim.clearDeprecations();
    // Register exactly one deprecation for the "displays deprecated methods" spec.
    const deprecatedMethod = () => Grim.deprecate("A test deprecation. This isn't used");
    deprecatedMethod();
    spyOn(Grim, 'deprecate'); // Don't fail tests if when using deprecated APIs in deprecation cop's activation
    const activationPromise = atom.packages.activatePackage('deprecation-cop');
    atom.commands.dispatch(workspaceElement, 'deprecation-cop:view');
    waitsForPromise(() => activationPromise);
    // The view becomes the active pane item once the command has opened it.
    waitsFor(() => deprecationCopView = atom.workspace.getActivePane().getActiveItem());
    runs(() => jasmine.unspy(Grim, 'deprecate'));
  });

  afterEach(() => jasmine.restoreDeprecationsSnapshot());

  it("displays deprecated methods", () => {
    expect(deprecationCopView.element.textContent).toMatch(/Deprecated calls/);
    expect(deprecationCopView.element.textContent).toMatch(/This isn't used/);
  });

  // TODO: Remove conditional when the new StyleManager deprecation APIs reach stable.
  if (atom.styles.getDeprecations != null) {
    it("displays deprecated selectors", () => {
      // Two deprecated sheets from package-1, one from package-2, and one from
      // a node_modules path (which the view groups under "Other").
      atom.styles.addStyleSheet("atom-text-editor::shadow { color: red }", {sourcePath: path.join('some-dir', 'packages', 'package-1', 'file-1.css')});
      atom.styles.addStyleSheet("atom-text-editor::shadow { color: yellow }", {context: 'atom-text-editor', sourcePath: path.join('some-dir', 'packages', 'package-1', 'file-2.css')});
      atom.styles.addStyleSheet('atom-text-editor::shadow { color: blue }', {sourcePath: path.join('another-dir', 'packages', 'package-2', 'file-3.css')});
      atom.styles.addStyleSheet('atom-text-editor::shadow { color: gray }', {sourcePath: path.join('another-dir', 'node_modules', 'package-3', 'file-4.css')});
      // Wait for the etch render pass triggered by the style-sheet changes.
      const promise = etch.getScheduler().getNextUpdatePromise();
      waitsForPromise(() => promise);
      runs(() => {
        const packageItems = deprecationCopView.element.querySelectorAll("ul.selectors > li");
        expect(packageItems.length).toBe(3);
        expect(packageItems[0].textContent).toMatch(/package-1/);
        expect(packageItems[1].textContent).toMatch(/package-2/);
        expect(packageItems[2].textContent).toMatch(/Other/);
        const packageDeprecationItems = packageItems[0].querySelectorAll("li.source-file");
        expect(packageDeprecationItems.length).toBe(2);
        expect(packageDeprecationItems[0].textContent).toMatch(/atom-text-editor/);
        expect(packageDeprecationItems[0].querySelector("a").href).toMatch('some-dir/packages/package-1/file-1.css');
        expect(packageDeprecationItems[1].textContent).toMatch(/:host/);
        expect(packageDeprecationItems[1].querySelector("a").href).toMatch('some-dir/packages/package-1/file-2.css');
      });
    });
  }

  it('skips stack entries which go through node_modules/ files when determining package name', () => {
    // Frames 0 and 1 pass through package1/node_modules and must be skipped;
    // frame 2 sits directly in package2/lib and should decide the result.
    const stack = [
      {
        "functionName": "function0",
        "location": path.normalize("/Users/user/.atom/packages/package1/node_modules/atom-space-pen-viewslib/space-pen.js:55:66"),
        "fileName": path.normalize("/Users/user/.atom/packages/package1/node_modules/atom-space-pen-views/lib/space-pen.js")
      },
      {
        "functionName": "function1",
        "location": path.normalize("/Users/user/.atom/packages/package1/node_modules/atom-space-pen-viewslib/space-pen.js:15:16"),
        "fileName": path.normalize("/Users/user/.atom/packages/package1/node_modules/atom-space-pen-views/lib/space-pen.js")
      },
      {
        "functionName": "function2",
        "location": path.normalize("/Users/user/.atom/packages/package2/lib/module.js:13:14"),
        "fileName": path.normalize("/Users/user/.atom/packages/package2/lib/module.js")
      }
    ];
    // Stub the package-path lookup so the spec is independent of what is
    // actually installed on disk.
    const packagePathsByPackageName = new Map([
      ['package1', path.normalize("/Users/user/.atom/packages/package1")],
      ['package2', path.normalize("/Users/user/.atom/packages/package2")]
    ]);
    spyOn(deprecationCopView, 'getPackagePathsByPackageName').andReturn(packagePathsByPackageName);
    const packageName = deprecationCopView.getPackageName(stack);
    expect(packageName).toBe("package2");
  });
});

View File

@ -1,827 +0,0 @@
path = require 'path'
grammarTest = require 'atom-grammar-test'
describe 'TextMate HTML grammar', ->
# Shared setup: force the TextMate (non-Tree-sitter) parser and activate
# language-html before each spec; `grammar` holds the text.html.basic grammar.
grammar = null

beforeEach ->
  atom.config.set 'core.useTreeSitterParsers', false

  waitsForPromise ->
    atom.packages.activatePackage('language-html')

  runs ->
    grammar = atom.grammars.grammarForScopeName('text.html.basic')

# Smoke test: the grammar loaded and registered under the expected scope name.
it 'parses the grammar', ->
  expect(grammar).toBeTruthy()
  expect(grammar.scopeName).toBe 'text.html.basic'
# <style> tags: attribute tokenization and embedding of the CSS grammar for
# the tag's content (language-css supplies the embedded source.css grammar).
# NOTE(review): leading whitespace inside the tokenizeLines heredocs was lost
# in extraction; the single-space indents below follow the `value: ' '`
# assertions — confirm against the original file.
describe 'style tags', ->
  beforeEach ->
    waitsForPromise ->
      atom.packages.activatePackage('language-css')

  it 'tokenizes the tag attributes', ->
    lines = grammar.tokenizeLines '''
      <style id="id" class="very-classy">
      </style>
    '''

    expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']
    expect(lines[0][1]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.style.html', 'entity.name.tag.style.html']
    expect(lines[0][3]).toEqual value: 'id', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'entity.other.attribute-name.id.html']
    expect(lines[0][4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'punctuation.separator.key-value.html']
    expect(lines[0][5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']
    expect(lines[0][6]).toEqual value: 'id', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'meta.toc-list.id.html']
    expect(lines[0][7]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
    expect(lines[0][9]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']
    expect(lines[0][10]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']
    expect(lines[0][11]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']
    expect(lines[0][12]).toEqual value: 'very-classy', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html']
    expect(lines[0][13]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
    expect(lines[0][14]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']
    expect(lines[1][0]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']
    expect(lines[1][1]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.style.html', 'entity.name.tag.style.html']
    expect(lines[1][2]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']

  it 'tokenizes multiline tag attributes', ->
    lines = grammar.tokenizeLines '''
      <style id="id"
       class="very-classy"
      >
      </style>
    '''

    expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']
    expect(lines[0][1]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.style.html', 'entity.name.tag.style.html']
    expect(lines[0][3]).toEqual value: 'id', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'entity.other.attribute-name.id.html']
    expect(lines[0][4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'punctuation.separator.key-value.html']
    expect(lines[0][5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']
    expect(lines[0][6]).toEqual value: 'id', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'meta.toc-list.id.html']
    expect(lines[0][7]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
    expect(lines[1][1]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']
    expect(lines[1][5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
    expect(lines[2][0]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']
    expect(lines[3][0]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']
    expect(lines[3][1]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.style.html', 'entity.name.tag.style.html']
    expect(lines[3][2]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']

  it 'tokenizes the content inside the tag as CSS', ->
    lines = grammar.tokenizeLines '''
      <style class="very-classy">
       span { color: red; }
      </style>
    '''

    expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']
    expect(lines[1][0]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.style.html', 'source.css.embedded.html']
    expect(lines[1][1]).toEqual value: 'span', scopes: ['text.html.basic', 'meta.tag.style.html', 'source.css.embedded.html', 'meta.selector.css', 'entity.name.tag.css']
    expect(lines[2][0]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']

  it 'tokenizes multiline tags', ->
    lines = grammar.tokenizeLines '''
      <style
       class="very-classy">
       span { color: red; }
      </style>
    '''

    expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']
    expect(lines[2][1]).toEqual value: 'span', scopes: ['text.html.basic', 'meta.tag.style.html', 'source.css.embedded.html', 'meta.selector.css', 'entity.name.tag.css']
    expect(lines[3][0]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']
# <script> tags: attribute tokenization, including multiline attribute lists.
# NOTE(review): heredoc leading whitespace was lost in extraction; indents
# below are reconstructed — confirm against the original file.
describe 'script tags', ->
  it 'tokenizes the tag attributes', ->
    lines = grammar.tokenizeLines '''
      <script id="id" type="text/html">
      </script>
    '''

    expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
    expect(lines[0][1]).toEqual value: 'script', scopes: ['text.html.basic', 'meta.tag.script.html', 'entity.name.tag.script.html']
    expect(lines[0][3]).toEqual value: 'id', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'entity.other.attribute-name.id.html']
    expect(lines[0][4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'punctuation.separator.key-value.html']
    expect(lines[0][5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']
    expect(lines[0][6]).toEqual value: 'id', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'meta.toc-list.id.html']
    expect(lines[0][7]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
    expect(lines[0][9]).toEqual value: 'type', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'entity.other.attribute-name.html']
    expect(lines[0][10]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'punctuation.separator.key-value.html']
    expect(lines[0][11]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']
    expect(lines[0][12]).toEqual value: 'text/html', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html']
    expect(lines[0][13]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
    expect(lines[0][14]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
    expect(lines[1][0]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
    expect(lines[1][1]).toEqual value: 'script', scopes: ['text.html.basic', 'meta.tag.script.html', 'entity.name.tag.script.html']
    expect(lines[1][2]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']

  it 'tokenizes multiline tag attributes', ->
    lines = grammar.tokenizeLines '''
      <script id="id" type="text/html"
       class="very-classy"
      >
      </script>
    '''

    expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
    expect(lines[0][1]).toEqual value: 'script', scopes: ['text.html.basic', 'meta.tag.script.html', 'entity.name.tag.script.html']
    expect(lines[0][3]).toEqual value: 'id', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'entity.other.attribute-name.id.html']
    expect(lines[0][4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'punctuation.separator.key-value.html']
    expect(lines[0][5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']
    expect(lines[0][6]).toEqual value: 'id', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'meta.toc-list.id.html']
    expect(lines[0][7]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
    expect(lines[0][9]).toEqual value: 'type', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'entity.other.attribute-name.html']
    expect(lines[0][10]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'punctuation.separator.key-value.html']
    expect(lines[0][11]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']
    expect(lines[0][12]).toEqual value: 'text/html', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html']
    expect(lines[0][13]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
    expect(lines[1][1]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']
    expect(lines[1][5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
    expect(lines[2][0]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
    expect(lines[3][0]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
    expect(lines[3][1]).toEqual value: 'script', scopes: ['text.html.basic', 'meta.tag.script.html', 'entity.name.tag.script.html']
    expect(lines[3][2]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
# <script type='text/template'>: the tag's content is tokenized as embedded
# HTML rather than JavaScript.
# NOTE(review): heredoc leading whitespace was lost in extraction; indents
# below are reconstructed — confirm against the original file.
describe 'template script tags', ->
  it 'tokenizes the content inside the tag as HTML', ->
    lines = grammar.tokenizeLines '''
      <script id='id' type='text/template'>
       <div>test</div>
      </script>
    '''

    expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
    expect(lines[1][0]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.script.html', 'text.embedded.html']
    expect(lines[1][1]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'text.embedded.html', 'meta.tag.block.div.html', 'punctuation.definition.tag.begin.html']
    expect(lines[2][0]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']

  it 'tokenizes multiline tags', ->
    lines = grammar.tokenizeLines '''
      <script id='id' type='text/template'
       class='very-classy'>
       <div>test</div>
      </script>
    '''

    expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
    expect(lines[2][1]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'text.embedded.html', 'meta.tag.block.div.html', 'punctuation.definition.tag.begin.html']
    expect(lines[3][0]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
# <script type='text/coffeescript'>: content tokenized as embedded
# CoffeeScript (requires language-coffee-script), and the closing </script>
# tag is still recognized even inside CoffeeScript comments.
# NOTE(review): heredoc leading whitespace was lost in extraction; indents
# below are reconstructed — confirm against the original file.
describe 'CoffeeScript script tags', ->
  beforeEach ->
    waitsForPromise ->
      atom.packages.activatePackage('language-coffee-script')

  it 'tokenizes the content inside the tag as CoffeeScript', ->
    lines = grammar.tokenizeLines '''
      <script id='id' type='text/coffeescript'>
       -> console.log 'hi'
      </script>
    '''

    expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
    expect(lines[1][0]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html']
    expect(lines[1][1]).toEqual value: '->', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'meta.function.inline.coffee', 'storage.type.function.coffee']
    expect(lines[2][0]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']

  it 'tokenizes multiline tags', ->
    lines = grammar.tokenizeLines '''
      <script id='id' type='text/coffeescript'
       class='very-classy'>
       -> console.log 'hi'
      </script>
    '''

    expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
    expect(lines[2][1]).toEqual value: '->', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'meta.function.inline.coffee', 'storage.type.function.coffee']
    expect(lines[3][0]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']

  it 'recognizes closing script tags in comments', ->
    # Line comment: the </script> inside the comment still closes the tag.
    lines = grammar.tokenizeLines '''
      <script id='id' type='text/coffeescript'>
       # comment </script>
    '''

    expect(lines[1][1]).toEqual value: '#', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'comment.line.number-sign.coffee', 'punctuation.definition.comment.coffee']
    expect(lines[1][2]).toEqual value: ' comment ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'comment.line.number-sign.coffee']
    expect(lines[1][3]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']

    # Block comment: same behavior for ### block comments.
    lines = grammar.tokenizeLines '''
      <script id='id' type='text/coffeescript'>
       ###
       comment </script>
    '''

    expect(lines[1][1]).toEqual value: '###', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'comment.block.coffee', 'punctuation.definition.comment.coffee']
    expect(lines[2][0]).toEqual value: ' comment ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'comment.block.coffee']
    expect(lines[2][1]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
# <script> with JavaScript content: tokenized as embedded JS (requires
# language-javascript), and the closing </script> tag is still recognized
# even inside JS line and block comments.
# NOTE(review): heredoc leading whitespace was lost in extraction; indents
# below are reconstructed — confirm against the original file.
describe 'JavaScript script tags', ->
  beforeEach ->
    waitsForPromise -> atom.packages.activatePackage('language-javascript')

  it 'tokenizes the content inside the tag as JavaScript', ->
    lines = grammar.tokenizeLines '''
      <script id='id' type='text/javascript'>
       var hi = 'hi'
      </script>
    '''

    expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
    expect(lines[1][0]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html']
    expect(lines[1][1]).toEqual value: 'var', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'storage.type.var.js']
    expect(lines[2][0]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']

  it 'tokenizes multiline tags', ->
    lines = grammar.tokenizeLines '''
      <script id='id'
       class='very-classy'>
       var hi = 'hi'
      </script>
    '''

    expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
    expect(lines[2][1]).toEqual value: 'var', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'storage.type.var.js']
    expect(lines[3][0]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']

  it 'recognizes closing script tags in comments', ->
    # Line comment: the </script> inside the comment still closes the tag.
    lines = grammar.tokenizeLines '''
      <script id='id' type='text/javascript'>
       // comment </script>
    '''

    expect(lines[1][1]).toEqual value: '//', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'comment.line.double-slash.js', 'punctuation.definition.comment.js']
    expect(lines[1][2]).toEqual value: ' comment ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'comment.line.double-slash.js']
    expect(lines[1][3]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']

    # Block comment: same behavior for /* */ comments.
    lines = grammar.tokenizeLines '''
      <script id='id' type='text/javascript'>
       /*
       comment </script>
    '''

    expect(lines[1][1]).toEqual value: '/*', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'comment.block.js', 'punctuation.definition.comment.begin.js']
    expect(lines[2][0]).toEqual value: ' comment ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'comment.block.js']
    expect(lines[2][1]).toEqual value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
# Specs for HTML comment tokenization, in particular the rule that a bare
# '--' inside a comment body is invalid HTML and gets an illegal scope.
describe 'comments', ->
it 'tokenizes -- as an error', ->
# '--->': the extra '-' is absorbed into the comment text token; the
# grammar still closes on the final '-->' and nothing is marked illegal.
{tokens} = grammar.tokenizeLine '<!-- some comment --->'
expect(tokens[0]).toEqual value: '<!--', scopes: ['text.html.basic', 'comment.block.html', 'punctuation.definition.comment.html']
expect(tokens[1]).toEqual value: ' some comment -', scopes: ['text.html.basic', 'comment.block.html']
expect(tokens[2]).toEqual value: '-->', scopes: ['text.html.basic', 'comment.block.html', 'punctuation.definition.comment.html']
# A free-standing '--' inside the body, however, is flagged as illegal.
{tokens} = grammar.tokenizeLine '<!-- -- -->'
expect(tokens[0]).toEqual value: '<!--', scopes: ['text.html.basic', 'comment.block.html', 'punctuation.definition.comment.html']
expect(tokens[1]).toEqual value: ' ', scopes: ['text.html.basic', 'comment.block.html']
expect(tokens[2]).toEqual value: '--', scopes: ['text.html.basic', 'comment.block.html', 'invalid.illegal.bad-comments-or-CDATA.html']
expect(tokens[3]).toEqual value: ' ', scopes: ['text.html.basic', 'comment.block.html']
expect(tokens[4]).toEqual value: '-->', scopes: ['text.html.basic', 'comment.block.html', 'punctuation.definition.comment.html']
# Declarative grammar tests: expected scopes are written as special comments
# inside the fixture HTML files themselves (presumably via the
# atom-grammar-test helper `grammarTest` is bound to — confirm at its require
# site earlier in this file).
grammarTest path.join(__dirname, 'fixtures/syntax_test_html.html')
grammarTest path.join(__dirname, 'fixtures/syntax_test_html_template_fragments.html')
# Specs for tag-attribute tokenization: quoted/unquoted/valueless attributes,
# whitespace around '=', multi-line attribute lists, and the special 'style'
# attribute whose value is handed off to the embedded CSS grammar
# ('source.css.style.html').
describe 'attributes', ->
it 'recognizes a single attribute with a quoted value', ->
{tokens} = grammar.tokenizeLine '<span class="foo">'
expect(tokens[3]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']
expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']
expect(tokens[5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']
expect(tokens[6]).toEqual value: 'foo', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html']
expect(tokens[7]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
expect(tokens[8]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
# Same expectations for single quotes (scope 'string.quoted.single.html').
{tokens} = grammar.tokenizeLine "<span class='foo'>"
expect(tokens[3]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']
expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']
expect(tokens[5]).toEqual value: "'", scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html', 'punctuation.definition.string.begin.html']
expect(tokens[6]).toEqual value: 'foo', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html']
expect(tokens[7]).toEqual value: "'", scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html', 'punctuation.definition.string.end.html']
expect(tokens[8]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
it 'recognizes a single attribute with spaces around the equals sign', ->
# Three variants: space before '=', space after '=', spaces on both sides.
# Whitespace stays inside the attribute-with-value scope as plain tokens.
{tokens} = grammar.tokenizeLine '<span class ="foo">'
expect(tokens[3]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']
expect(tokens[4]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html']
expect(tokens[5]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']
expect(tokens[6]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']
expect(tokens[7]).toEqual value: 'foo', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html']
expect(tokens[8]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
expect(tokens[9]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
{tokens} = grammar.tokenizeLine '<span class= "foo">'
expect(tokens[3]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']
expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']
expect(tokens[5]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html']
expect(tokens[6]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']
expect(tokens[7]).toEqual value: 'foo', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html']
expect(tokens[8]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
expect(tokens[9]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
{tokens} = grammar.tokenizeLine '<span class = "foo">'
expect(tokens[3]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']
expect(tokens[4]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html']
expect(tokens[5]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']
expect(tokens[6]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html']
expect(tokens[7]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']
expect(tokens[8]).toEqual value: 'foo', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html']
expect(tokens[9]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
expect(tokens[10]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
it 'recognizes a single attribute with an unquoted value', ->
# Unquoted values may contain non-alphanumeric characters; the whole run
# up to '>' becomes one 'string.unquoted.html' token.
{tokens} = grammar.tokenizeLine '<span class=foo-3+5@>'
expect(tokens[3]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']
expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']
expect(tokens[5]).toEqual value: 'foo-3+5@', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.unquoted.html']
expect(tokens[6]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
it 'recognizes a single attribute with no value', ->
{tokens} = grammar.tokenizeLine '<span class>'
expect(tokens[3]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-without-value.html', 'entity.other.attribute-name.html']
expect(tokens[4]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
it 'recognizes multiple attributes with varying values', ->
# Mix of quoted ('class'), valueless ('disabled'), and unquoted
# ('spellcheck=true') attributes in one tag.
{tokens} = grammar.tokenizeLine "<span class='btn' disabled spellcheck=true>"
expect(tokens[3]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']
expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']
expect(tokens[5]).toEqual value: "'", scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html', 'punctuation.definition.string.begin.html']
expect(tokens[6]).toEqual value: 'btn', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html']
expect(tokens[7]).toEqual value: "'", scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html', 'punctuation.definition.string.end.html']
expect(tokens[8]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html']
expect(tokens[9]).toEqual value: 'disabled', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-without-value.html', 'entity.other.attribute-name.html']
expect(tokens[10]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html']
expect(tokens[11]).toEqual value: 'spellcheck', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'entity.other.attribute-name.html']
expect(tokens[12]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'punctuation.separator.key-value.html']
expect(tokens[13]).toEqual value: 'true', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'string.unquoted.html']
expect(tokens[14]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
it 'recognizes attributes that are not on the same line as the tag name', ->
lines = grammar.tokenizeLines '''
<span
class="foo"
disabled>
'''
expect(lines[1][1]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']
expect(lines[1][2]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']
expect(lines[1][5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
expect(lines[2][1]).toEqual value: 'disabled', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-without-value.html', 'entity.other.attribute-name.html']
expect(lines[2][2]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
it 'tokenizes only one attribute value in a row', ->
# The following line is invalid per the HTML specification; some browsers
# nevertheless parse 'world' as a separate attribute for compatibility,
# and the grammar mirrors that (valueless-attribute scope, not string).
{tokens} = grammar.tokenizeLine '<span attr="hello"world>'
expect(tokens[3]).toEqual value: 'attr', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'entity.other.attribute-name.html']
expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'punctuation.separator.key-value.html']
expect(tokens[5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']
expect(tokens[6]).toEqual value: 'hello', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'string.quoted.double.html']
expect(tokens[7]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
expect(tokens[8]).toEqual value: 'world', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-without-value.html', 'entity.other.attribute-name.html']
expect(tokens[9]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
describe "the 'style' attribute", ->
beforeEach ->
# The CSS grammar must be active for style-attribute values to get
# 'source.css.style.html' scopes.
waitsForPromise ->
atom.packages.activatePackage('language-css')
# Maps each quote character to the scope-name fragment it produces
# ('string.quoted.double.html' / 'string.quoted.single.html'); the loop
# below generates each spec once per quote style.
quotes =
'"': 'double'
"'": 'single'
for quote, type of quotes
it "tokenizes #{type}-quoted style attribute values as CSS property lists", ->
{tokens} = grammar.tokenizeLine "<span style=#{quote}display: none;#{quote}>"
expect(tokens[3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html']
expect(tokens[5]).toEqual value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'punctuation.definition.string.begin.html']
expect(tokens[6]).toEqual value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']
expect(tokens[9]).toEqual value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']
expect(tokens[10]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css']
expect(tokens[11]).toEqual value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'punctuation.definition.string.end.html']
expect(tokens[12]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
# Two declarations in one attribute value.
{tokens} = grammar.tokenizeLine "<span style=#{quote}display: none; z-index: 10;#{quote}>"
expect(tokens[3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html']
expect(tokens[5]).toEqual value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'punctuation.definition.string.begin.html']
expect(tokens[6]).toEqual value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']
expect(tokens[9]).toEqual value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']
expect(tokens[10]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css']
expect(tokens[12]).toEqual value: 'z-index', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']
expect(tokens[15]).toEqual value: '10', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css']
expect(tokens[16]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css']
expect(tokens[17]).toEqual value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'punctuation.definition.string.end.html']
expect(tokens[18]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
it "tokenizes #{type}-quoted multiline attributes", ->
lines = grammar.tokenizeLines """
<span style=#{quote}display: none;
z-index: 10;#{quote}>
"""
expect(lines[0][3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html']
expect(lines[0][5]).toEqual value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'punctuation.definition.string.begin.html']
expect(lines[0][6]).toEqual value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']
expect(lines[0][9]).toEqual value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']
expect(lines[0][10]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css']
expect(lines[1][0]).toEqual value: 'z-index', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']
expect(lines[1][3]).toEqual value: '10', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css']
expect(lines[1][4]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css']
expect(lines[1][5]).toEqual value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'punctuation.definition.string.end.html']
expect(lines[1][6]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
it 'tokenizes incomplete property lists', ->
# Value lacks the trailing ';' — the declaration must still close
# cleanly at the end quote.
{tokens} = grammar.tokenizeLine '<span style="display: none">'
expect(tokens[3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html']
expect(tokens[5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']
expect(tokens[6]).toEqual value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']
expect(tokens[9]).toEqual value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']
expect(tokens[10]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
expect(tokens[11]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
# This second case interpolates the loop's `quote`/`type`, so this
# spec is generated inside the `for quote, type of quotes` loop above.
lines = grammar.tokenizeLines """
<span style=#{quote}display: none;
z-index: 10#{quote}>
"""
expect(lines[0][3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html']
expect(lines[0][5]).toEqual value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'punctuation.definition.string.begin.html']
expect(lines[0][6]).toEqual value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']
expect(lines[0][9]).toEqual value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']
expect(lines[0][10]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css']
expect(lines[1][0]).toEqual value: 'z-index', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']
expect(lines[1][3]).toEqual value: '10', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css']
expect(lines[1][4]).toEqual value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'punctuation.definition.string.end.html']
expect(lines[1][5]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
it 'ends invalid quoted property lists correctly', ->
# Property name missing a value ('s:') — the closing quote must still
# terminate the embedded CSS scope rather than swallowing '>'.
{tokens} = grammar.tokenizeLine '<span style="s:">'
expect(tokens[3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html']
expect(tokens[5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']
expect(tokens[6]).toEqual value: 's', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css']
expect(tokens[7]).toEqual value: ':', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'source.css.style.html', 'meta.property-list.css', 'punctuation.separator.key-value.css']
expect(tokens[8]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
expect(tokens[9]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
it 'tokenizes unquoted property lists', ->
# Unquoted style values are still handed to the CSS grammar, scoped
# under 'string.unquoted.html'.
{tokens} = grammar.tokenizeLine '<span style=display:none;></span>'
expect(tokens[3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html']
expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'punctuation.separator.key-value.html']
expect(tokens[5]).toEqual value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']
expect(tokens[7]).toEqual value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']
expect(tokens[8]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css']
expect(tokens[9]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
{tokens} = grammar.tokenizeLine '<span style=display:none;z-index:10></span>'
expect(tokens[3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html']
expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'punctuation.separator.key-value.html']
expect(tokens[5]).toEqual value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']
expect(tokens[7]).toEqual value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']
expect(tokens[8]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css']
expect(tokens[9]).toEqual value: 'z-index', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']
expect(tokens[11]).toEqual value: '10', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css']
expect(tokens[12]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
it 'ends invalid unquoted property lists correctly', ->
{tokens} = grammar.tokenizeLine '<span style=s:></span>'
expect(tokens[3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html']
expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'punctuation.separator.key-value.html']
expect(tokens[5]).toEqual value: 's', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css']
expect(tokens[6]).toEqual value: ':', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'punctuation.separator.key-value.css']
expect(tokens[7]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
# An unquoted value ends at the first space, so 'none' here is read as a
# new valueless attribute rather than part of the style value.
{tokens} = grammar.tokenizeLine '<span style=display: none></span>'
expect(tokens[3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html']
expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'punctuation.separator.key-value.html']
expect(tokens[5]).toEqual value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']
expect(tokens[6]).toEqual value: ':', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'punctuation.separator.key-value.css']
expect(tokens[7]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html']
expect(tokens[8]).toEqual value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-without-value.html', 'entity.other.attribute-name.html']
expect(tokens[9]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
# Specs for HTML character references (&name; / &#N; / &#xN;). This describe
# continues beyond the visible portion of the file.
describe 'character references', ->
it 'tokenizes & and characters after it', ->
# NOTE: '&a' should NOT be tokenized as a character reference because no
# semicolon follows it; since a semicolon could still appear later, the
# grammar plays conservatively and leaves it plain text.
{tokens} = grammar.tokenizeLine '& &amp; &a'
expect(tokens[0]).toEqual value: '& ', scopes: ['text.html.basic']
expect(tokens[1]).toEqual value: '&', scopes: ['text.html.basic', 'constant.character.entity.html', 'punctuation.definition.entity.begin.html']
expect(tokens[2]).toEqual value: 'amp', scopes: ['text.html.basic', 'constant.character.entity.html', 'entity.name.entity.other.html']
expect(tokens[3]).toEqual value: ';', scopes: ['text.html.basic', 'constant.character.entity.html', 'punctuation.definition.entity.end.html']
expect(tokens[4]).toEqual value: ' &a', scopes: ['text.html.basic']
# A lone '&' at end of line is plain text as well.
lines = grammar.tokenizeLines '&\n'
expect(lines[0][0]).toEqual value: '&', scopes: ['text.html.basic']
it 'tokenizes hexadecimal and digit character references', ->
{tokens} = grammar.tokenizeLine '&#x00022; &#X00022; &#34;'
expect(tokens[0]).toEqual value: '&', scopes: ['text.html.basic', 'constant.character.entity.html', 'punctuation.definition.entity.begin.html']
expect(tokens[1]).toEqual value: '#x00022', scopes: ['text.html.basic', 'constant.character.entity.html', 'entity.name.entity.other.html']
expect(tokens[2]).toEqual value: ';', scopes: ['text.html.basic', 'constant.character.entity.html', 'punctuation.definition.entity.end.html']
expect(tokens[4]).toEqual value: '&', scopes: ['text.html.basic', 'constant.character.entity.html', 'punctuation.definition.entity.begin.html']
expect(tokens[5]).toEqual value: '#X00022', scopes: ['text.html.basic', 'constant.character.entity.html', 'entity.name.entity.other.html']
expect(tokens[6]).toEqual value: ';', scopes: ['text.html.basic', 'constant.character.entity.html', 'punctuation.definition.entity.end.html']
expect(tokens[8]).toEqual value: '&', scopes: ['text.html.basic', 'constant.character.entity.html', 'punctuation.definition.entity.begin.html']
expect(tokens[9]).toEqual value: '#34', scopes: ['text.html.basic', 'constant.character.entity.html', 'entity.name.entity.other.html']
expect(tokens[10]).toEqual value: ';', scopes: ['text.html.basic', 'constant.character.entity.html', 'punctuation.definition.entity.end.html']
it 'tokenizes invalid ampersands', ->
{tokens} = grammar.tokenizeLine 'PSE&>'
expect(tokens[0]).toEqual value: 'PSE', scopes: ['text.html.basic']
expect(tokens[1]).toEqual value: '&', scopes: ['text.html.basic', 'invalid.illegal.bad-ampersand.html']
expect(tokens[2]).toEqual value: '>', scopes: ['text.html.basic']
{tokens} = grammar.tokenizeLine 'PSE&'
expect(tokens[0]).toEqual value: 'PSE&', scopes: ['text.html.basic']
{tokens} = grammar.tokenizeLine '&<'
expect(tokens[0]).toEqual value: '&<', scopes: ['text.html.basic']
{tokens} = grammar.tokenizeLine '& '
expect(tokens[0]).toEqual value: '& ', scopes: ['text.html.basic']
{tokens} = grammar.tokenizeLine '&'
expect(tokens[0]).toEqual value: '&', scopes: ['text.html.basic']
{tokens} = grammar.tokenizeLine '&&'
expect(tokens[0]).toEqual value: '&&', scopes: ['text.html.basic']
it 'tokenizes character references in attributes', ->
{tokens} = grammar.tokenizeLine '<a href="http://example.com?&amp;">'
expect(tokens[7]).toEqual value: '&', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'constant.character.entity.html', 'punctuation.definition.entity.begin.html']
expect(tokens[8]).toEqual value: 'amp', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'constant.character.entity.html', 'entity.name.entity.other.html']
expect(tokens[9]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'constant.character.entity.html', 'punctuation.definition.entity.end.html']
it 'does not tokenize query parameters as character references', ->
{tokens} = grammar.tokenizeLine '<a href="http://example.com?one=1&type=json&topic=css">'
expect(tokens[6]).toEqual value: 'http://example.com?one=1&type=json&topic=css', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html']
it 'does not tokenize multiple ampersands followed by alphabetical characters as character references', ->
{tokens} = grammar.tokenizeLine '<a href="http://example.com?price&something&yummy:&wow">'
expect(tokens[6]).toEqual value: 'http://example.com?price&something&yummy:&wow', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html']
it 'tokenizes invalid ampersands in attributes', ->
# Note: in order to replicate the following tests' behaviors, make sure you have language-hyperlink disabled
{tokens} = grammar.tokenizeLine '<a href="http://example.com?&">'
expect(tokens[7]).toEqual value: '&', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'invalid.illegal.bad-ampersand.html']
{tokens} = grammar.tokenizeLine '<a href="http://example.com?&=">'
expect(tokens[7]).toEqual value: '&', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'invalid.illegal.bad-ampersand.html']
{tokens} = grammar.tokenizeLine '<a href="http://example.com?& ">'
expect(tokens[6]).toEqual value: 'http://example.com?& ', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html']
lines = grammar.tokenizeLines '<a href="http://example.com?&\n">'
expect(lines[0][6]).toEqual value: 'http://example.com?&', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html']
{tokens} = grammar.tokenizeLine '<a href="http://example.com?&&">'
expect(tokens[6]).toEqual value: 'http://example.com?&', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html']
expect(tokens[7]).toEqual value: '&', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'invalid.illegal.bad-ampersand.html']
# --- firstLineMatch ---------------------------------------------------------
# Exercises the grammar's first-line detection: HTML5 doctypes plus Emacs and
# Vim modelines should select text.html.basic for otherwise-unidentified files.
describe 'firstLineMatch', ->
it 'recognises HTML5 doctypes', ->
# Doctype matching is case-insensitive.
expect(grammar.firstLineRegex.findNextMatchSync('<!DOCTYPE html>')).not.toBeNull()
expect(grammar.firstLineRegex.findNextMatchSync('<!doctype HTML>')).not.toBeNull()
it 'recognises Emacs modelines', ->
# Each line below is a legal Emacs -*- ... -*- modeline naming the HTML mode,
# in short form, explicit "mode:" form, and mixed with other variables.
valid = '''
#-*- HTML -*-
#-*- mode: HTML -*-
/* -*-html-*- */
// -*- HTML -*-
/* -*- mode:HTML -*- */
// -*- font:bar;mode:HTML -*-
// -*- font:bar;mode:HTML;foo:bar; -*-
// -*-font:mode;mode:HTML-*-
// -*- foo:bar mode: html bar:baz -*-
" -*-foo:bar;mode:html;bar:foo-*- ";
" -*-font-mode:foo;mode:html;foo-bar:quux-*-"
"-*-font:x;foo:bar; mode : HTML; bar:foo;foooooo:baaaaar;fo:ba;-*-";
"-*- font:x;foo : bar ; mode : HtML ; bar : foo ; foooooo:baaaaar;fo:ba-*-";
'''
for line in valid.split /\n/
expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull()
# Near-miss modelines: malformed delimiters, wrong mode names, or the mode
# given under a different variable must NOT match.
invalid = '''
/* --*html-*- */
/* -*-- HTML -*-
/* -*- -- HTML -*-
/* -*- HTM -;- -*-
// -*- xHTML -*-
// -*- HTML; -*-
// -*- html-stuff -*-
/* -*- model:html -*-
/* -*- indent-mode:html -*-
// -*- font:mode;html -*-
// -*- HTimL -*-
// -*- mode: -*- HTML
// -*- mode: -html -*-
// -*-font:mode;mode:html--*-
'''
for line in invalid.split /\n/
expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull()
it 'recognises Vim modelines', ->
# Accepts vim/vi/ex modelines in their many legal forms: se/set or bare
# assignment; ft/filetype/syntax variables; version-qualified vim600/vim>600;
# other options before and after the filetype.
valid = '''
vim: se filetype=html:
# vim: se ft=html:
# vim: set ft=HTML:
# vim: set filetype=XHTML:
# vim: ft=XHTML
# vim: syntax=HTML
# vim: se syntax=xhtml:
# ex: syntax=HTML
# vim:ft=html
# vim600: ft=xhtml
# vim>600: set ft=html:
# vi:noai:sw=3 ts=6 ft=html
# vi::::::::::noai:::::::::::: ft=html
# vim:ts=4:sts=4:sw=4:noexpandtab:ft=html
# vi:: noai : : : : sw =3 ts =6 ft =html
# vim: ts=4: pi sts=4: ft=html: noexpandtab: sw=4:
# vim: ts=4 sts=4: ft=html noexpandtab:
# vim:noexpandtab sts=4 ft=html ts=4
# vim:noexpandtab:ft=html
# vim:ts=4:sts=4 ft=html:noexpandtab:\x20
# vim:noexpandtab titlestring=hi\|there\\\\ ft=html ts=4
'''
for line in valid.split /\n/
expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull()
# Lines that merely mention html, use the wrong variable name, lack the
# required separators, or have bad backslash escaping must not match.
invalid = '''
ex: se filetype=html:
_vi: se filetype=HTML:
vi: se filetype=HTML
# vim set ft=html5
# vim: soft=html
# vim: clean-syntax=html:
# vim set ft=html:
# vim: setft=HTML:
# vim: se ft=html backupdir=tmp
# vim: set ft=HTML set cmdheight=1
# vim:noexpandtab sts:4 ft:HTML ts:4
# vim:noexpandtab titlestring=hi\\|there\\ ft=HTML ts=4
# vim:noexpandtab titlestring=hi\\|there\\\\\\ ft=HTML ts=4
'''
for line in invalid.split /\n/
expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull()
# --- Tags -------------------------------------------------------------------
# Classification of tag names into the style/script/structure/block/inline
# meta scopes, fall-through to the generic "other" scope for namespaced and
# hyphenated names, and the <?xml ...?> preprocessor declaration.
describe 'tags', ->
  it 'tokenizes style tags as such', ->
    {tokens} = grammar.tokenizeLine '<style>'
    # <style>/<script> punctuation has no begin/end suffix, unlike block/inline tags.
    expect(tokens[0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']
    expect(tokens[1]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.style.html', 'entity.name.tag.style.html']
    expect(tokens[2]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']
  it 'tokenizes script tags as such', ->
    {tokens} = grammar.tokenizeLine '<script>'
    expect(tokens[0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
    expect(tokens[1]).toEqual value: 'script', scopes: ['text.html.basic', 'meta.tag.script.html', 'entity.name.tag.script.html']
    expect(tokens[2]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']
  it 'tokenizes structure tags as such', ->
    {tokens} = grammar.tokenizeLine '<html>'
    expect(tokens[0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.structure.html.html', 'punctuation.definition.tag.html']
    expect(tokens[1]).toEqual value: 'html', scopes: ['text.html.basic', 'meta.tag.structure.html.html', 'entity.name.tag.structure.html.html']
    expect(tokens[2]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.structure.html.html', 'punctuation.definition.tag.html']
  it 'tokenizes block tags as such', ->
    {tokens} = grammar.tokenizeLine '<div>'
    expect(tokens[0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.block.div.html', 'punctuation.definition.tag.begin.html']
    expect(tokens[1]).toEqual value: 'div', scopes: ['text.html.basic', 'meta.tag.block.div.html', 'entity.name.tag.block.div.html']
    expect(tokens[2]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.block.div.html', 'punctuation.definition.tag.end.html']
  it 'tokenizes inline tags as such', ->
    {tokens} = grammar.tokenizeLine '<span>'
    expect(tokens[0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.begin.html']
    expect(tokens[1]).toEqual value: 'span', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'entity.name.tag.inline.span.html']
    expect(tokens[2]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']
  # XML-namespaced names (<style:foo>, <div:foo>, ...) must fall through to the
  # generic "other" tag scope even when the prefix alone is a known tag name.
  it 'does not tokenize XML namespaces as tags if the prefix is a valid style tag', ->
    {tokens} = grammar.tokenizeLine '<style:foo>'
    expect(tokens[1].value).toNotEqual 'style'
    expect(tokens[1].scopes).toEqual ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
  it 'does not tokenize XML namespaces as tags if the prefix is a valid script tag', ->
    {tokens} = grammar.tokenizeLine '<script:foo>'
    expect(tokens[1].value).toNotEqual 'script'
    expect(tokens[1].scopes).toEqual ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
  it 'does not tokenize XML namespaces as tags if the prefix is a valid structure tag', ->
    {tokens} = grammar.tokenizeLine '<html:foo>'
    expect(tokens[1].value).toNotEqual 'html'
    expect(tokens[1].scopes).toEqual ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
  it 'does not tokenize XML namespaces as tags if the prefix is a valid block tag', ->
    {tokens} = grammar.tokenizeLine '<div:foo>'
    expect(tokens[1].value).toNotEqual 'div'
    expect(tokens[1].scopes).toEqual ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
  it 'does not tokenize XML namespaces as tags if the prefix is a valid inline tag', ->
    {tokens} = grammar.tokenizeLine '<span:foo>'
    expect(tokens[1].value).toNotEqual 'span'
    expect(tokens[1].scopes).toEqual ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
  # Hyphenated names (<style-foo>, <div-foo>, ...) must likewise be treated as
  # one "other" tag name, not truncated at the hyphen.
  it 'does not treat only the part before a hyphen as tag name if this part is a valid style tag', ->
    {tokens} = grammar.tokenizeLine '<style-foo>'
    expect(tokens[1].value).toNotEqual 'style'
    expect(tokens[1].scopes).toEqual ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
  it 'does not treat only the part before a hyphen as tag name if this part is a valid script tag', ->
    {tokens} = grammar.tokenizeLine '<script-foo>'
    expect(tokens[1].value).toNotEqual 'script'
    expect(tokens[1].scopes).toEqual ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
  it 'does not treat only the part before a hyphen as tag name if this part is a valid structure tag', ->
    {tokens} = grammar.tokenizeLine '<html-foo>'
    expect(tokens[1].value).toNotEqual 'html'
    expect(tokens[1].scopes).toEqual ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
  it 'does not treat only the part before a hyphen as tag name if this part is a valid block tag', ->
    {tokens} = grammar.tokenizeLine '<div-foo>'
    expect(tokens[1].value).toNotEqual 'div'
    expect(tokens[1].scopes).toEqual ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
  it 'does not treat only the part before a hyphen as tag name if this part is a valid inline tag', ->
    {tokens} = grammar.tokenizeLine '<span-foo>'
    expect(tokens[1].value).toNotEqual 'span'
    expect(tokens[1].scopes).toEqual ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
  it 'tokenizes other tags as such', ->
    {tokens} = grammar.tokenizeLine '<foo>'
    expect(tokens[0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.other.html', 'punctuation.definition.tag.begin.html']
    expect(tokens[1]).toEqual value: 'foo', scopes: ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
    expect(tokens[2]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.other.html', 'punctuation.definition.tag.end.html']
  it 'tolerates colons in other tag names', ->
    {tokens} = grammar.tokenizeLine '<foo:bar>'
    expect(tokens[1]).toEqual value: 'foo:bar', scopes: ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
  it 'tolerates hyphens in other tag names', ->
    {tokens} = grammar.tokenizeLine '<foo-bar>'
    expect(tokens[1]).toEqual value: 'foo-bar', scopes: ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html']
  it 'tokenizes XML declaration correctly', ->
    {tokens} = grammar.tokenizeLine '<?xml version="1.0" encoding="UTF-8"?>'
    expect(tokens[0]).toEqual value: '<?', scopes: ['text.html.basic', 'meta.tag.preprocessor.xml.html', 'punctuation.definition.tag.html']
    expect(tokens[1]).toEqual value: 'xml', scopes: ['text.html.basic', 'meta.tag.preprocessor.xml.html', 'entity.name.tag.xml.html']
    expect(tokens[2]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.preprocessor.xml.html']
    expect(tokens[3]).toEqual value: 'version', scopes: ['text.html.basic', 'meta.tag.preprocessor.xml.html', 'meta.attribute-with-value.html', 'entity.other.attribute-name.html']
    expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.preprocessor.xml.html', 'meta.attribute-with-value.html', 'punctuation.separator.key-value.html']
    expect(tokens[5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.preprocessor.xml.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']
    expect(tokens[6]).toEqual value: '1.0', scopes: ['text.html.basic', 'meta.tag.preprocessor.xml.html', 'meta.attribute-with-value.html', 'string.quoted.double.html']
    expect(tokens[7]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.preprocessor.xml.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
    expect(tokens[8]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.preprocessor.xml.html']
    expect(tokens[9]).toEqual value: 'encoding', scopes: ['text.html.basic', 'meta.tag.preprocessor.xml.html', 'meta.attribute-with-value.html', 'entity.other.attribute-name.html']
    expect(tokens[10]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.preprocessor.xml.html', 'meta.attribute-with-value.html', 'punctuation.separator.key-value.html']
    expect(tokens[11]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.preprocessor.xml.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']
    expect(tokens[12]).toEqual value: 'UTF-8', scopes: ['text.html.basic', 'meta.tag.preprocessor.xml.html', 'meta.attribute-with-value.html', 'string.quoted.double.html']
    expect(tokens[13]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.preprocessor.xml.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
    expect(tokens[14]).toEqual value: '?>', scopes: ['text.html.basic', 'meta.tag.preprocessor.xml.html', 'punctuation.definition.tag.html']
# --- Snippets ---------------------------------------------------------------
# Ensures the snippets package scopes its HTML snippets to .text.html and that
# none leak into embedded JavaScript inside <script> tags.
describe 'snippets', ->
snippetsModule = null
beforeEach ->
# FIXME: This should just be atom.packages.loadPackage('snippets'),
# but a bug in PackageManager::resolvePackagePath where it finds language-html's
# `snippets` directory before the actual package necessitates passing an absolute path
# See https://github.com/atom/atom/issues/15953
snippetsPath = path.join(atom.packages.resourcePath, 'node_modules', 'snippets')
snippetsModule = require(atom.packages.loadPackage(snippetsPath).getMainModulePath())
# Disable loading of user snippets before the package is activated
spyOn(snippetsModule, 'loadUserSnippets').andCallFake (callback) -> callback({})
snippetsModule.activate()
# Block until the package reports its snippets are parsed and available.
waitsFor 'snippets to load', (done) -> snippetsModule.onDidLoadSnippets(done)
it 'suggests snippets', ->
expect(Object.keys(snippetsModule.parsedSnippetsForScopes(['.text.html'])).length).toBeGreaterThan 10
it 'does not suggest any HTML snippets when in embedded scripts', ->
expect(Object.keys(snippetsModule.parsedSnippetsForScopes(['.text.html .source.js.embedded.html'])).length).toBe 0

View File

@ -0,0 +1,925 @@
const path = require('path');
const grammarTest = require('atom-grammar-test');
describe('TextMate HTML grammar', function() {
let grammar = null;
beforeEach(function() {
// Force the TextMate (first-mate) grammar path; these specs do not cover tree-sitter.
atom.config.set('core.useTreeSitterParsers', false);
waitsForPromise(() => atom.packages.activatePackage('language-html'));
// Grammar lookup must run after activation completes, hence the runs() block.
runs(() => grammar = atom.grammars.grammarForScopeName('text.html.basic'));
});
// Sanity check: the language-html package registered a grammar under the
// expected root scope before any tokenization specs run.
it('parses the grammar', () => {
  const rootScope = 'text.html.basic';
  expect(grammar).toBeTruthy();
  expect(grammar.scopeName).toBe(rootScope);
});
// --- Style tags -------------------------------------------------------------
// Attribute tokenization on <style> (including multiline attribute lists) and
// delegation of the tag body to the CSS grammar (source.css.embedded.html).
describe('style tags', function() {
// language-css must be active for the embedded-CSS scopes asserted below.
beforeEach(() => waitsForPromise(() => atom.packages.activatePackage('language-css')));
it('tokenizes the tag attributes', function() {
const lines = grammar.tokenizeLines(`\
<style id="id" class="very-classy">
</style>\
`
);
expect(lines[0][0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']});
expect(lines[0][1]).toEqual({value: 'style', scopes: ['text.html.basic', 'meta.tag.style.html', 'entity.name.tag.style.html']});
expect(lines[0][3]).toEqual({value: 'id', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'entity.other.attribute-name.id.html']});
expect(lines[0][4]).toEqual({value: '=', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'punctuation.separator.key-value.html']});
expect(lines[0][5]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']});
expect(lines[0][6]).toEqual({value: 'id', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'meta.toc-list.id.html']});
expect(lines[0][7]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(lines[0][9]).toEqual({value: 'class', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']});
expect(lines[0][10]).toEqual({value: '=', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']});
expect(lines[0][11]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']});
expect(lines[0][12]).toEqual({value: 'very-classy', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html']});
expect(lines[0][13]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(lines[0][14]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']});
expect(lines[1][0]).toEqual({value: '</', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']});
expect(lines[1][1]).toEqual({value: 'style', scopes: ['text.html.basic', 'meta.tag.style.html', 'entity.name.tag.style.html']});
expect(lines[1][2]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']});
});
// Attributes may continue on following lines; scoping must carry across them.
it('tokenizes multiline tag attributes', function() {
const lines = grammar.tokenizeLines(`\
<style id="id"
class="very-classy"
>
</style>\
`
);
expect(lines[0][0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']});
expect(lines[0][1]).toEqual({value: 'style', scopes: ['text.html.basic', 'meta.tag.style.html', 'entity.name.tag.style.html']});
expect(lines[0][3]).toEqual({value: 'id', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'entity.other.attribute-name.id.html']});
expect(lines[0][4]).toEqual({value: '=', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'punctuation.separator.key-value.html']});
expect(lines[0][5]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']});
expect(lines[0][6]).toEqual({value: 'id', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'meta.toc-list.id.html']});
expect(lines[0][7]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(lines[1][1]).toEqual({value: 'class', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']});
expect(lines[1][5]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(lines[2][0]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']});
expect(lines[3][0]).toEqual({value: '</', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']});
expect(lines[3][1]).toEqual({value: 'style', scopes: ['text.html.basic', 'meta.tag.style.html', 'entity.name.tag.style.html']});
expect(lines[3][2]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']});
});
it('tokenizes the content inside the tag as CSS', function() {
const lines = grammar.tokenizeLines(`\
<style class="very-classy">
span { color: red; }
</style>\
`
);
expect(lines[0][0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']});
expect(lines[1][0]).toEqual({value: ' ', scopes: ['text.html.basic', 'meta.tag.style.html', 'source.css.embedded.html']});
expect(lines[1][1]).toEqual({value: 'span', scopes: ['text.html.basic', 'meta.tag.style.html', 'source.css.embedded.html', 'meta.selector.css', 'entity.name.tag.css']});
expect(lines[2][0]).toEqual({value: '</', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']});
});
// The opening tag itself may span lines; the body must still be CSS-scoped.
it('tokenizes multiline tags', function() {
const lines = grammar.tokenizeLines(`\
<style
class="very-classy">
span { color: red; }
</style>\
`
);
expect(lines[0][0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']});
expect(lines[2][1]).toEqual({value: 'span', scopes: ['text.html.basic', 'meta.tag.style.html', 'source.css.embedded.html', 'meta.selector.css', 'entity.name.tag.css']});
expect(lines[3][0]).toEqual({value: '</', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']});
});
});
// --- Script tags ------------------------------------------------------------
// Attribute tokenization on <script>; id/class get their specialized
// attribute scopes while type falls under the generic attribute-with-value.
describe('script tags', function() {
it('tokenizes the tag attributes', function() {
const lines = grammar.tokenizeLines(`\
<script id="id" type="text/html">
</script>\
`
);
expect(lines[0][0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
expect(lines[0][1]).toEqual({value: 'script', scopes: ['text.html.basic', 'meta.tag.script.html', 'entity.name.tag.script.html']});
expect(lines[0][3]).toEqual({value: 'id', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'entity.other.attribute-name.id.html']});
expect(lines[0][4]).toEqual({value: '=', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'punctuation.separator.key-value.html']});
expect(lines[0][5]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']});
expect(lines[0][6]).toEqual({value: 'id', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'meta.toc-list.id.html']});
expect(lines[0][7]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(lines[0][9]).toEqual({value: 'type', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'entity.other.attribute-name.html']});
expect(lines[0][10]).toEqual({value: '=', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'punctuation.separator.key-value.html']});
expect(lines[0][11]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']});
expect(lines[0][12]).toEqual({value: 'text/html', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html']});
expect(lines[0][13]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(lines[0][14]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
expect(lines[1][0]).toEqual({value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
expect(lines[1][1]).toEqual({value: 'script', scopes: ['text.html.basic', 'meta.tag.script.html', 'entity.name.tag.script.html']});
expect(lines[1][2]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
});
// Attribute lists continuing onto later lines must keep per-attribute scopes.
it('tokenizes multiline tag attributes', function() {
const lines = grammar.tokenizeLines(`\
<script id="id" type="text/html"
class="very-classy"
>
</script>\
`
);
expect(lines[0][0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
expect(lines[0][1]).toEqual({value: 'script', scopes: ['text.html.basic', 'meta.tag.script.html', 'entity.name.tag.script.html']});
expect(lines[0][3]).toEqual({value: 'id', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'entity.other.attribute-name.id.html']});
expect(lines[0][4]).toEqual({value: '=', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'punctuation.separator.key-value.html']});
expect(lines[0][5]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']});
expect(lines[0][6]).toEqual({value: 'id', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'meta.toc-list.id.html']});
expect(lines[0][7]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(lines[0][9]).toEqual({value: 'type', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'entity.other.attribute-name.html']});
expect(lines[0][10]).toEqual({value: '=', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'punctuation.separator.key-value.html']});
expect(lines[0][11]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']});
expect(lines[0][12]).toEqual({value: 'text/html', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html']});
expect(lines[0][13]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(lines[1][1]).toEqual({value: 'class', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']});
expect(lines[1][5]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(lines[2][0]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
expect(lines[3][0]).toEqual({value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
expect(lines[3][1]).toEqual({value: 'script', scopes: ['text.html.basic', 'meta.tag.script.html', 'entity.name.tag.script.html']});
expect(lines[3][2]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
});
});
// --- Template script tags ---------------------------------------------------
// <script type='text/template'> bodies are re-scoped as embedded HTML
// (text.embedded.html) and tokenized with the HTML grammar itself.
describe('template script tags', function() {
it('tokenizes the content inside the tag as HTML', function() {
const lines = grammar.tokenizeLines(`\
<script id='id' type='text/template'>
<div>test</div>
</script>\
`
);
expect(lines[0][0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
expect(lines[1][0]).toEqual({value: ' ', scopes: ['text.html.basic', 'meta.tag.script.html', 'text.embedded.html']});
expect(lines[1][1]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'text.embedded.html', 'meta.tag.block.div.html', 'punctuation.definition.tag.begin.html']});
expect(lines[2][0]).toEqual({value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
});
// The opening tag may span lines; the body must still be HTML-scoped.
it('tokenizes multiline tags', function() {
const lines = grammar.tokenizeLines(`\
<script id='id' type='text/template'
class='very-classy'>
<div>test</div>
</script>\
`
);
expect(lines[0][0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
expect(lines[2][1]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'text.embedded.html', 'meta.tag.block.div.html', 'punctuation.definition.tag.begin.html']});
expect(lines[3][0]).toEqual({value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
});
});
describe('CoffeeScript script tags', function() {
beforeEach(() => waitsForPromise(() => atom.packages.activatePackage('language-coffee-script')));
it('tokenizes the content inside the tag as CoffeeScript', function() {
const lines = grammar.tokenizeLines(`\
<script id='id' type='text/coffeescript'>
-> console.log 'hi'
</script>\
`
);
expect(lines[0][0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
expect(lines[1][0]).toEqual({value: ' ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html']});
expect(lines[1][1]).toEqual({value: '->', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'meta.function.inline.coffee', 'storage.type.function.coffee']});
expect(lines[2][0]).toEqual({value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
});
it('tokenizes multiline tags', function() {
const lines = grammar.tokenizeLines(`\
<script id='id' type='text/coffeescript'
class='very-classy'>
-> console.log 'hi'
</script>\
`
);
expect(lines[0][0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
expect(lines[2][1]).toEqual({value: '->', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'meta.function.inline.coffee', 'storage.type.function.coffee']});
expect(lines[3][0]).toEqual({value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
});
it('recognizes closing script tags in comments', function() {
let lines = grammar.tokenizeLines(`\
<script id='id' type='text/coffeescript'>
# comment </script>\
`
);
expect(lines[1][1]).toEqual({value: '#', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'comment.line.number-sign.coffee', 'punctuation.definition.comment.coffee']});
expect(lines[1][2]).toEqual({value: ' comment ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'comment.line.number-sign.coffee']});
expect(lines[1][3]).toEqual({value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
lines = grammar.tokenizeLines(`\
<script id='id' type='text/coffeescript'>
###
comment </script>\
`
);
expect(lines[1][1]).toEqual({value: '###', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'comment.block.coffee', 'punctuation.definition.comment.coffee']});
expect(lines[2][0]).toEqual({value: ' comment ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'comment.block.coffee']});
expect(lines[2][1]).toEqual({value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
});
});
describe('JavaScript script tags', function() {
beforeEach(() => waitsForPromise(() => atom.packages.activatePackage('language-javascript')));
it('tokenizes the content inside the tag as JavaScript', function() {
const lines = grammar.tokenizeLines(`\
<script id='id' type='text/javascript'>
var hi = 'hi'
</script>\
`
);
expect(lines[0][0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
expect(lines[1][0]).toEqual({value: ' ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html']});
expect(lines[1][1]).toEqual({value: 'var', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'storage.type.var.js']});
expect(lines[2][0]).toEqual({value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
});
it('tokenizes multiline tags', function() {
const lines = grammar.tokenizeLines(`\
<script id='id'
class='very-classy'>
var hi = 'hi'
</script>\
`
);
expect(lines[0][0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
expect(lines[2][1]).toEqual({value: 'var', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'storage.type.var.js']});
expect(lines[3][0]).toEqual({value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
});
it('recognizes closing script tags in comments', function() {
let lines = grammar.tokenizeLines(`\
<script id='id' type='text/javascript'>
// comment </script>\
`
);
expect(lines[1][1]).toEqual({value: '//', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'comment.line.double-slash.js', 'punctuation.definition.comment.js']});
expect(lines[1][2]).toEqual({value: ' comment ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'comment.line.double-slash.js']});
expect(lines[1][3]).toEqual({value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
lines = grammar.tokenizeLines(`\
<script id='id' type='text/javascript'>
/*
comment </script>\
`
);
expect(lines[1][1]).toEqual({value: '/*', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'comment.block.js', 'punctuation.definition.comment.begin.js']});
expect(lines[2][0]).toEqual({value: ' comment ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'comment.block.js']});
expect(lines[2][1]).toEqual({value: '</', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
});
});
describe('comments', () => it('tokenizes -- as an error', function() {
let {tokens} = grammar.tokenizeLine('<!-- some comment --->');
expect(tokens[0]).toEqual({value: '<!--', scopes: ['text.html.basic', 'comment.block.html', 'punctuation.definition.comment.html']});
expect(tokens[1]).toEqual({value: ' some comment -', scopes: ['text.html.basic', 'comment.block.html']});
expect(tokens[2]).toEqual({value: '-->', scopes: ['text.html.basic', 'comment.block.html', 'punctuation.definition.comment.html']});
({tokens} = grammar.tokenizeLine('<!-- -- -->'));
expect(tokens[0]).toEqual({value: '<!--', scopes: ['text.html.basic', 'comment.block.html', 'punctuation.definition.comment.html']});
expect(tokens[1]).toEqual({value: ' ', scopes: ['text.html.basic', 'comment.block.html']});
expect(tokens[2]).toEqual({value: '--', scopes: ['text.html.basic', 'comment.block.html', 'invalid.illegal.bad-comments-or-CDATA.html']});
expect(tokens[3]).toEqual({value: ' ', scopes: ['text.html.basic', 'comment.block.html']});
expect(tokens[4]).toEqual({value: '-->', scopes: ['text.html.basic', 'comment.block.html', 'punctuation.definition.comment.html']});
}));
grammarTest(path.join(__dirname, 'fixtures/syntax_test_html.html'));
grammarTest(path.join(__dirname, 'fixtures/syntax_test_html_template_fragments.html'));
describe('attributes', function() {
it('recognizes a single attribute with a quoted value', function() {
let {tokens} = grammar.tokenizeLine('<span class="foo">');
expect(tokens[3]).toEqual({value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']});
expect(tokens[4]).toEqual({value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']});
expect(tokens[5]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']});
expect(tokens[6]).toEqual({value: 'foo', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html']});
expect(tokens[7]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(tokens[8]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']});
({tokens} = grammar.tokenizeLine("<span class='foo'>"));
expect(tokens[3]).toEqual({value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']});
expect(tokens[4]).toEqual({value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']});
expect(tokens[5]).toEqual({value: "'", scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html', 'punctuation.definition.string.begin.html']});
expect(tokens[6]).toEqual({value: 'foo', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html']});
expect(tokens[7]).toEqual({value: "'", scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html', 'punctuation.definition.string.end.html']});
expect(tokens[8]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']});
});
it('recognizes a single attribute with spaces around the equals sign', function() {
let {tokens} = grammar.tokenizeLine('<span class ="foo">');
expect(tokens[3]).toEqual({value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']});
expect(tokens[4]).toEqual({value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html']});
expect(tokens[5]).toEqual({value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']});
expect(tokens[6]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']});
expect(tokens[7]).toEqual({value: 'foo', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html']});
expect(tokens[8]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(tokens[9]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']});
({tokens} = grammar.tokenizeLine('<span class= "foo">'));
expect(tokens[3]).toEqual({value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']});
expect(tokens[4]).toEqual({value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']});
expect(tokens[5]).toEqual({value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html']});
expect(tokens[6]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']});
expect(tokens[7]).toEqual({value: 'foo', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html']});
expect(tokens[8]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(tokens[9]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']});
({tokens} = grammar.tokenizeLine('<span class = "foo">'));
expect(tokens[3]).toEqual({value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']});
expect(tokens[4]).toEqual({value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html']});
expect(tokens[5]).toEqual({value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']});
expect(tokens[6]).toEqual({value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html']});
expect(tokens[7]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']});
expect(tokens[8]).toEqual({value: 'foo', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html']});
expect(tokens[9]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(tokens[10]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']});
});
it('recognizes a single attribute with an unquoted value', function() {
const {tokens} = grammar.tokenizeLine('<span class=foo-3+5@>');
expect(tokens[3]).toEqual({value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']});
expect(tokens[4]).toEqual({value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']});
expect(tokens[5]).toEqual({value: 'foo-3+5@', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.unquoted.html']});
expect(tokens[6]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']});
});
it('recognizes a single attribute with no value', function() {
const {tokens} = grammar.tokenizeLine('<span class>');
expect(tokens[3]).toEqual({value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-without-value.html', 'entity.other.attribute-name.html']});
expect(tokens[4]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']});
});
it('recognizes multiple attributes with varying values', function() {
const {tokens} = grammar.tokenizeLine("<span class='btn' disabled spellcheck=true>");
expect(tokens[3]).toEqual({value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']});
expect(tokens[4]).toEqual({value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']});
expect(tokens[5]).toEqual({value: "'", scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html', 'punctuation.definition.string.begin.html']});
expect(tokens[6]).toEqual({value: 'btn', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html']});
expect(tokens[7]).toEqual({value: "'", scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html', 'punctuation.definition.string.end.html']});
expect(tokens[8]).toEqual({value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html']});
expect(tokens[9]).toEqual({value: 'disabled', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-without-value.html', 'entity.other.attribute-name.html']});
expect(tokens[10]).toEqual({value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html']});
expect(tokens[11]).toEqual({value: 'spellcheck', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'entity.other.attribute-name.html']});
expect(tokens[12]).toEqual({value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'punctuation.separator.key-value.html']});
expect(tokens[13]).toEqual({value: 'true', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'string.unquoted.html']});
expect(tokens[14]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']});
});
it('recognizes attributes that are not on the same line as the tag name', function() {
const lines = grammar.tokenizeLines(`\
<span
class="foo"
disabled>\
`
);
expect(lines[1][1]).toEqual({value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html']});
expect(lines[1][2]).toEqual({value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html']});
expect(lines[1][5]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(lines[2][1]).toEqual({value: 'disabled', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-without-value.html', 'entity.other.attribute-name.html']});
expect(lines[2][2]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']});
});
it('tokenizes only one attribute value in a row', function() {
// The following line is invalid per HTML specification, however some browsers parse the 'world' as attribute for compatibility reasons.
const {tokens} = grammar.tokenizeLine('<span attr="hello"world>');
expect(tokens[3]).toEqual({value: 'attr', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'entity.other.attribute-name.html']});
expect(tokens[4]).toEqual({value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'punctuation.separator.key-value.html']});
expect(tokens[5]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']});
expect(tokens[6]).toEqual({value: 'hello', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'string.quoted.double.html']});
expect(tokens[7]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(tokens[8]).toEqual({value: 'world', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-without-value.html', 'entity.other.attribute-name.html']});
expect(tokens[9]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']});
});
describe("the 'style' attribute", function() {
let quote, type;
beforeEach(() => waitsForPromise(() => atom.packages.activatePackage('language-css')));
const quotes = {
'"': 'double',
"'": 'single'
};
for (quote in quotes) {
type = quotes[quote];
it(`tokenizes ${type}-quoted style attribute values as CSS property lists`, function() {
let {tokens} = grammar.tokenizeLine(`<span style=${quote}display: none;${quote}>`);
expect(tokens[3]).toEqual({value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html']});
expect(tokens[5]).toEqual({value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'punctuation.definition.string.begin.html']});
expect(tokens[6]).toEqual({value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']});
expect(tokens[9]).toEqual({value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']});
expect(tokens[10]).toEqual({value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css']});
expect(tokens[11]).toEqual({value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'punctuation.definition.string.end.html']});
expect(tokens[12]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']});
({tokens} = grammar.tokenizeLine(`<span style=${quote}display: none; z-index: 10;${quote}>`));
expect(tokens[3]).toEqual({value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html']});
expect(tokens[5]).toEqual({value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'punctuation.definition.string.begin.html']});
expect(tokens[6]).toEqual({value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']});
expect(tokens[9]).toEqual({value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']});
expect(tokens[10]).toEqual({value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css']});
expect(tokens[12]).toEqual({value: 'z-index', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']});
expect(tokens[15]).toEqual({value: '10', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css']});
expect(tokens[16]).toEqual({value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css']});
expect(tokens[17]).toEqual({value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'punctuation.definition.string.end.html']});
expect(tokens[18]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']});
});
it(`tokenizes ${type}-quoted multiline attributes`, function() {
const lines = grammar.tokenizeLines(`\
<span style=${quote}display: none;
z-index: 10;${quote}>\
`
);
expect(lines[0][3]).toEqual({value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html']});
expect(lines[0][5]).toEqual({value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'punctuation.definition.string.begin.html']});
expect(lines[0][6]).toEqual({value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']});
expect(lines[0][9]).toEqual({value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']});
expect(lines[0][10]).toEqual({value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css']});
expect(lines[1][0]).toEqual({value: 'z-index', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']});
expect(lines[1][3]).toEqual({value: '10', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css']});
expect(lines[1][4]).toEqual({value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css']});
expect(lines[1][5]).toEqual({value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'punctuation.definition.string.end.html']});
expect(lines[1][6]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']});
});
}
it('tokenizes incomplete property lists', function() {
const {tokens} = grammar.tokenizeLine('<span style="display: none">');
expect(tokens[3]).toEqual({value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html']});
expect(tokens[5]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']});
expect(tokens[6]).toEqual({value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']});
expect(tokens[9]).toEqual({value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']});
expect(tokens[10]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(tokens[11]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']});
const lines = grammar.tokenizeLines(`\
<span style=${quote}display: none;
z-index: 10${quote}>\
`
);
expect(lines[0][3]).toEqual({value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html']});
expect(lines[0][5]).toEqual({value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'punctuation.definition.string.begin.html']});
expect(lines[0][6]).toEqual({value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']});
expect(lines[0][9]).toEqual({value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']});
expect(lines[0][10]).toEqual({value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css']});
expect(lines[1][0]).toEqual({value: 'z-index', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']});
expect(lines[1][3]).toEqual({value: '10', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css']});
expect(lines[1][4]).toEqual({value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', `string.quoted.${type}.html`, 'punctuation.definition.string.end.html']});
expect(lines[1][5]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']});
});
it('ends invalid quoted property lists correctly', function() {
const {tokens} = grammar.tokenizeLine('<span style="s:">');
expect(tokens[3]).toEqual({value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html']});
expect(tokens[5]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']});
expect(tokens[6]).toEqual({value: 's', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css']});
expect(tokens[7]).toEqual({value: ':', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'source.css.style.html', 'meta.property-list.css', 'punctuation.separator.key-value.css']});
expect(tokens[8]).toEqual({value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(tokens[9]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']});
});
it('tokenizes unquoted property lists', function() {
  // Shared scope stacks: the tag itself, the style attribute, and the embedded
  // CSS property list inside the unquoted attribute value.
  const tag = ['text.html.basic', 'meta.tag.inline.span.html'];
  const attr = [...tag, 'meta.attribute-with-value.style.html'];
  const css = [...attr, 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css'];
  let {tokens} = grammar.tokenizeLine('<span style=display:none;></span>');
  expect(tokens[3]).toEqual({value: 'style', scopes: [...attr, 'entity.other.attribute-name.style.html']});
  expect(tokens[4]).toEqual({value: '=', scopes: [...attr, 'punctuation.separator.key-value.html']});
  expect(tokens[5]).toEqual({value: 'display', scopes: [...css, 'meta.property-name.css', 'support.type.property-name.css']});
  expect(tokens[7]).toEqual({value: 'none', scopes: [...css, 'meta.property-value.css', 'support.constant.property-value.css']});
  expect(tokens[8]).toEqual({value: ';', scopes: [...css, 'punctuation.terminator.rule.css']});
  expect(tokens[9]).toEqual({value: '>', scopes: [...tag, 'punctuation.definition.tag.end.html']});
  // Multiple declarations may follow one another without quotes.
  ({tokens} = grammar.tokenizeLine('<span style=display:none;z-index:10></span>'));
  expect(tokens[3]).toEqual({value: 'style', scopes: [...attr, 'entity.other.attribute-name.style.html']});
  expect(tokens[4]).toEqual({value: '=', scopes: [...attr, 'punctuation.separator.key-value.html']});
  expect(tokens[5]).toEqual({value: 'display', scopes: [...css, 'meta.property-name.css', 'support.type.property-name.css']});
  expect(tokens[7]).toEqual({value: 'none', scopes: [...css, 'meta.property-value.css', 'support.constant.property-value.css']});
  expect(tokens[8]).toEqual({value: ';', scopes: [...css, 'punctuation.terminator.rule.css']});
  expect(tokens[9]).toEqual({value: 'z-index', scopes: [...css, 'meta.property-name.css', 'support.type.property-name.css']});
  expect(tokens[11]).toEqual({value: '10', scopes: [...css, 'meta.property-value.css', 'constant.numeric.css']});
  expect(tokens[12]).toEqual({value: '>', scopes: [...tag, 'punctuation.definition.tag.end.html']});
});
it('ends invalid unquoted property lists correctly', function() {
  const tag = ['text.html.basic', 'meta.tag.inline.span.html'];
  const attr = [...tag, 'meta.attribute-with-value.style.html'];
  const css = [...attr, 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css'];
  // A value-less declaration ends at the closing angle bracket.
  let {tokens} = grammar.tokenizeLine('<span style=s:></span>');
  expect(tokens[3]).toEqual({value: 'style', scopes: [...attr, 'entity.other.attribute-name.style.html']});
  expect(tokens[4]).toEqual({value: '=', scopes: [...attr, 'punctuation.separator.key-value.html']});
  expect(tokens[5]).toEqual({value: 's', scopes: [...css, 'meta.property-name.css']});
  expect(tokens[6]).toEqual({value: ':', scopes: [...css, 'punctuation.separator.key-value.css']});
  expect(tokens[7]).toEqual({value: '>', scopes: [...tag, 'punctuation.definition.tag.end.html']});
  // A space terminates an unquoted value, so the following "none" is read as a
  // bare (value-less) attribute rather than as CSS.
  ({tokens} = grammar.tokenizeLine('<span style=display: none></span>'));
  expect(tokens[3]).toEqual({value: 'style', scopes: [...attr, 'entity.other.attribute-name.style.html']});
  expect(tokens[4]).toEqual({value: '=', scopes: [...attr, 'punctuation.separator.key-value.html']});
  expect(tokens[5]).toEqual({value: 'display', scopes: [...css, 'meta.property-name.css', 'support.type.property-name.css']});
  expect(tokens[6]).toEqual({value: ':', scopes: [...css, 'punctuation.separator.key-value.css']});
  expect(tokens[7]).toEqual({value: ' ', scopes: tag});
  expect(tokens[8]).toEqual({value: 'none', scopes: [...tag, 'meta.attribute-without-value.html', 'entity.other.attribute-name.html']});
  expect(tokens[9]).toEqual({value: '>', scopes: [...tag, 'punctuation.definition.tag.end.html']});
});
});
});
describe('character references', function() {
  // Scope stacks shared by the entity assertions below: a reference is three
  // tokens — "&" (begin), the entity body (name), and ";" (end).
  const entity = ['text.html.basic', 'constant.character.entity.html'];
  const begin = [...entity, 'punctuation.definition.entity.begin.html'];
  const name = [...entity, 'entity.name.entity.other.html'];
  const end = [...entity, 'punctuation.definition.entity.end.html'];
  // Scope stack of a double-quoted href attribute value on an <a> tag.
  const href = ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html'];

  it('tokenizes & and characters after it', function() {
    // NOTE: &a should NOT be tokenized as a character reference as there is no
    // semicolon following it. We have no way of knowing if there will ever be
    // a semicolon so we play conservatively.
    const {tokens} = grammar.tokenizeLine('& &amp; &a');
    expect(tokens[0]).toEqual({value: '& ', scopes: ['text.html.basic']});
    expect(tokens[1]).toEqual({value: '&', scopes: begin});
    expect(tokens[2]).toEqual({value: 'amp', scopes: name});
    expect(tokens[3]).toEqual({value: ';', scopes: end});
    expect(tokens[4]).toEqual({value: ' &a', scopes: ['text.html.basic']});
    const lines = grammar.tokenizeLines('&\n');
    expect(lines[0][0]).toEqual({value: '&', scopes: ['text.html.basic']});
  });

  it('tokenizes hexadecimal and digit character references', function() {
    const {tokens} = grammar.tokenizeLine('&#x00022; &#X00022; &#34;');
    // [starting token index, expected entity body] for each of the three forms.
    [[0, '#x00022'], [4, '#X00022'], [8, '#34']].forEach(function([i, body]) {
      expect(tokens[i]).toEqual({value: '&', scopes: begin});
      expect(tokens[i + 1]).toEqual({value: body, scopes: name});
      expect(tokens[i + 2]).toEqual({value: ';', scopes: end});
    });
  });

  it('tokenizes invalid ampersands', function() {
    let {tokens} = grammar.tokenizeLine('PSE&>');
    expect(tokens[0]).toEqual({value: 'PSE', scopes: ['text.html.basic']});
    expect(tokens[1]).toEqual({value: '&', scopes: ['text.html.basic', 'invalid.illegal.bad-ampersand.html']});
    expect(tokens[2]).toEqual({value: '>', scopes: ['text.html.basic']});
    // Ampersands that cannot possibly start a reference stay plain text.
    for (const text of ['PSE&', '&<', '& ', '&', '&&']) {
      ({tokens} = grammar.tokenizeLine(text));
      expect(tokens[0]).toEqual({value: text, scopes: ['text.html.basic']});
    }
  });

  it('tokenizes character references in attributes', function() {
    const {tokens} = grammar.tokenizeLine('<a href="http://example.com?&amp;">');
    expect(tokens[7]).toEqual({value: '&', scopes: [...href, 'constant.character.entity.html', 'punctuation.definition.entity.begin.html']});
    expect(tokens[8]).toEqual({value: 'amp', scopes: [...href, 'constant.character.entity.html', 'entity.name.entity.other.html']});
    expect(tokens[9]).toEqual({value: ';', scopes: [...href, 'constant.character.entity.html', 'punctuation.definition.entity.end.html']});
  });

  it('does not tokenize query parameters as character references', function() {
    const {tokens} = grammar.tokenizeLine('<a href="http://example.com?one=1&type=json&topic=css">');
    expect(tokens[6]).toEqual({value: 'http://example.com?one=1&type=json&topic=css', scopes: href});
  });

  it('does not tokenize multiple ampersands followed by alphabetical characters as character references', function() {
    const {tokens} = grammar.tokenizeLine('<a href="http://example.com?price&something&yummy:&wow">');
    expect(tokens[6]).toEqual({value: 'http://example.com?price&something&yummy:&wow', scopes: href});
  });

  it('tokenizes invalid ampersands in attributes', function() {
    // Note: in order to replicate the following tests' behaviors, make sure
    // you have language-hyperlink disabled.
    let {tokens} = grammar.tokenizeLine('<a href="http://example.com?&">');
    expect(tokens[7]).toEqual({value: '&', scopes: [...href, 'invalid.illegal.bad-ampersand.html']});
    ({tokens} = grammar.tokenizeLine('<a href="http://example.com?&=">'));
    expect(tokens[7]).toEqual({value: '&', scopes: [...href, 'invalid.illegal.bad-ampersand.html']});
    ({tokens} = grammar.tokenizeLine('<a href="http://example.com?& ">'));
    expect(tokens[6]).toEqual({value: 'http://example.com?& ', scopes: href});
    const lines = grammar.tokenizeLines('<a href="http://example.com?&\n">');
    expect(lines[0][6]).toEqual({value: 'http://example.com?&', scopes: href});
    ({tokens} = grammar.tokenizeLine('<a href="http://example.com?&&">'));
    expect(tokens[6]).toEqual({value: 'http://example.com?&', scopes: href});
    expect(tokens[7]).toEqual({value: '&', scopes: [...href, 'invalid.illegal.bad-ampersand.html']});
  });
});
describe('firstLineMatch', function() {
// `firstLineRegex` is run against a file's first line to decide whether this
// grammar should claim the file.
it('recognises HTML5 doctypes', function() {
expect(grammar.firstLineRegex.findNextMatchSync('<!DOCTYPE html>')).not.toBeNull();
expect(grammar.firstLineRegex.findNextMatchSync('<!doctype HTML>')).not.toBeNull();
});
it('recognises Emacs modelines', function() {
let line;
// Well-formed `-*- ... -*-` Emacs modelines naming HTML, wrapped in a variety
// of host-language comment styles.
const valid = `\
#-*- HTML -*-
#-*- mode: HTML -*-
/* -*-html-*- */
// -*- HTML -*-
/* -*- mode:HTML -*- */
// -*- font:bar;mode:HTML -*-
// -*- font:bar;mode:HTML;foo:bar; -*-
// -*-font:mode;mode:HTML-*-
// -*- foo:bar mode: html bar:baz -*-
" -*-foo:bar;mode:html;bar:foo-*- ";
" -*-font-mode:foo;mode:html;foo-bar:quux-*-"
"-*-font:x;foo:bar; mode : HTML; bar:foo;foooooo:baaaaar;fo:ba;-*-";
"-*- font:x;foo : bar ; mode : HtML ; bar : foo ; foooooo:baaaaar;fo:ba-*-";\
`;
for (line of Array.from(valid.split(/\n/))) {
expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull();
}
// Near-miss modelines: malformed delimiters, wrong option names, or modes
// that are not exactly (x)html.
const invalid = `\
/* --*html-*- */
/* -*-- HTML -*-
/* -*- -- HTML -*-
/* -*- HTM -;- -*-
// -*- xHTML -*-
// -*- HTML; -*-
// -*- html-stuff -*-
/* -*- model:html -*-
/* -*- indent-mode:html -*-
// -*- font:mode;html -*-
// -*- HTimL -*-
// -*- mode: -*- HTML
// -*- mode: -html -*-
// -*-font:mode;mode:html--*-\
`;
// NOTE(review): the result-collecting IIFE is decaffeinate output; Jasmine
// ignores the return value of an `it` callback.
return (() => {
const result = [];
for (line of invalid.split(/\n/)) {
result.push(expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull());
}
return result;
})();
});
it('recognises Vim modelines', function() {
let line;
// Valid `vim:`/`vi:`/`ex:` modelines that set filetype/syntax to (x)html,
// including versioned forms (vim600, vim>600) and mixed option lists.
const valid = `\
vim: se filetype=html:
# vim: se ft=html:
# vim: set ft=HTML:
# vim: set filetype=XHTML:
# vim: ft=XHTML
# vim: syntax=HTML
# vim: se syntax=xhtml:
# ex: syntax=HTML
# vim:ft=html
# vim600: ft=xhtml
# vim>600: set ft=html:
# vi:noai:sw=3 ts=6 ft=html
# vi::::::::::noai:::::::::::: ft=html
# vim:ts=4:sts=4:sw=4:noexpandtab:ft=html
# vi:: noai : : : : sw =3 ts =6 ft =html
# vim: ts=4: pi sts=4: ft=html: noexpandtab: sw=4:
# vim: ts=4 sts=4: ft=html noexpandtab:
# vim:noexpandtab sts=4 ft=html ts=4
# vim:noexpandtab:ft=html
# vim:ts=4:sts=4 ft=html:noexpandtab:\x20
# vim:noexpandtab titlestring=hi\|there\\\\ ft=html ts=4\
`;
for (line of valid.split(/\n/)) {
expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull();
}
// Modelines that must NOT match: wrong prefix, missing colon after vim/vi,
// unrelated option names, or escaping that swallows the ft option.
const invalid = `\
ex: se filetype=html:
_vi: se filetype=HTML:
vi: se filetype=HTML
# vim set ft=html5
# vim: soft=html
# vim: clean-syntax=html:
# vim set ft=html:
# vim: setft=HTML:
# vim: se ft=html backupdir=tmp
# vim: set ft=HTML set cmdheight=1
# vim:noexpandtab sts:4 ft:HTML ts:4
# vim:noexpandtab titlestring=hi\\|there\\ ft=HTML ts=4
# vim:noexpandtab titlestring=hi\\|there\\\\\\ ft=HTML ts=4\
`;
return (() => {
const result = [];
for (line of Array.from(invalid.split(/\n/))) {
result.push(expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull());
}
return result;
})();
});
});
describe('tags', function() {
  // Scope stack given to any element the grammar does not specially recognize.
  const otherTag = ['text.html.basic', 'meta.tag.other.html', 'entity.name.tag.other.html'];
  // Well-known element names paired with the category word used in their scopes.
  const knownTags = [
    ['style', 'style'],
    ['script', 'script'],
    ['html', 'structure'],
    ['div', 'block'],
    ['span', 'inline']
  ];

  it('tokenizes style tags as such', function() {
    const {tokens} = grammar.tokenizeLine('<style>');
    expect(tokens[0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']});
    expect(tokens[1]).toEqual({value: 'style', scopes: ['text.html.basic', 'meta.tag.style.html', 'entity.name.tag.style.html']});
    expect(tokens[2]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html']});
  });

  it('tokenizes script tags as such', function() {
    const {tokens} = grammar.tokenizeLine('<script>');
    expect(tokens[0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
    expect(tokens[1]).toEqual({value: 'script', scopes: ['text.html.basic', 'meta.tag.script.html', 'entity.name.tag.script.html']});
    expect(tokens[2]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
  });

  it('tokenizes structure tags as such', function() {
    const {tokens} = grammar.tokenizeLine('<html>');
    expect(tokens[0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.structure.html.html', 'punctuation.definition.tag.html']});
    expect(tokens[1]).toEqual({value: 'html', scopes: ['text.html.basic', 'meta.tag.structure.html.html', 'entity.name.tag.structure.html.html']});
    expect(tokens[2]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.structure.html.html', 'punctuation.definition.tag.html']});
  });

  it('tokenizes block tags as such', function() {
    // Unlike style/script/structure above, block and inline tags use distinct
    // begin/end punctuation scopes.
    const {tokens} = grammar.tokenizeLine('<div>');
    expect(tokens[0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.block.div.html', 'punctuation.definition.tag.begin.html']});
    expect(tokens[1]).toEqual({value: 'div', scopes: ['text.html.basic', 'meta.tag.block.div.html', 'entity.name.tag.block.div.html']});
    expect(tokens[2]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.block.div.html', 'punctuation.definition.tag.end.html']});
  });

  it('tokenizes inline tags as such', function() {
    const {tokens} = grammar.tokenizeLine('<span>');
    expect(tokens[0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.begin.html']});
    expect(tokens[1]).toEqual({value: 'span', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'entity.name.tag.inline.span.html']});
    expect(tokens[2]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html']});
  });

  // A colon after a known name makes the element an XML-namespaced "other" tag,
  // not the well-known element the prefix resembles.
  for (const [tag, category] of knownTags) {
    it(`does not tokenize XML namespaces as tags if the prefix is a valid ${category} tag`, function() {
      const {tokens} = grammar.tokenizeLine(`<${tag}:foo>`);
      expect(tokens[1].value).toNotEqual(tag);
      expect(tokens[1].scopes).toEqual(otherTag);
    });
  }

  // Likewise for a hyphen: <style-foo> etc. is a custom "other" element.
  // (Titles fixed: the originals read "it('it does not ... is a is a valid".)
  for (const [tag, category] of knownTags) {
    it(`does not treat only the part before a hyphen as tag name if this part is a valid ${category} tag`, function() {
      const {tokens} = grammar.tokenizeLine(`<${tag}-foo>`);
      expect(tokens[1].value).toNotEqual(tag);
      expect(tokens[1].scopes).toEqual(otherTag);
    });
  }

  it('tokenizes other tags as such', function() {
    const {tokens} = grammar.tokenizeLine('<foo>');
    expect(tokens[0]).toEqual({value: '<', scopes: ['text.html.basic', 'meta.tag.other.html', 'punctuation.definition.tag.begin.html']});
    expect(tokens[1]).toEqual({value: 'foo', scopes: otherTag});
    expect(tokens[2]).toEqual({value: '>', scopes: ['text.html.basic', 'meta.tag.other.html', 'punctuation.definition.tag.end.html']});
  });

  it('tolerates colons in other tag names', function() {
    const {tokens} = grammar.tokenizeLine('<foo:bar>');
    expect(tokens[1]).toEqual({value: 'foo:bar', scopes: otherTag});
  });

  it('tolerates hyphens in other tag names', function() {
    const {tokens} = grammar.tokenizeLine('<foo-bar>');
    expect(tokens[1]).toEqual({value: 'foo-bar', scopes: otherTag});
  });

  it('tokenizes XML declaration correctly', function() {
    // Scope stacks: the processing instruction, its attributes, and their
    // double-quoted string values.
    const pi = ['text.html.basic', 'meta.tag.preprocessor.xml.html'];
    const attr = [...pi, 'meta.attribute-with-value.html'];
    const str = [...attr, 'string.quoted.double.html'];
    const {tokens} = grammar.tokenizeLine('<?xml version="1.0" encoding="UTF-8"?>');
    expect(tokens[0]).toEqual({value: '<?', scopes: [...pi, 'punctuation.definition.tag.html']});
    expect(tokens[1]).toEqual({value: 'xml', scopes: [...pi, 'entity.name.tag.xml.html']});
    expect(tokens[2]).toEqual({value: ' ', scopes: pi});
    expect(tokens[3]).toEqual({value: 'version', scopes: [...attr, 'entity.other.attribute-name.html']});
    expect(tokens[4]).toEqual({value: '=', scopes: [...attr, 'punctuation.separator.key-value.html']});
    expect(tokens[5]).toEqual({value: '"', scopes: [...str, 'punctuation.definition.string.begin.html']});
    expect(tokens[6]).toEqual({value: '1.0', scopes: str});
    expect(tokens[7]).toEqual({value: '"', scopes: [...str, 'punctuation.definition.string.end.html']});
    expect(tokens[8]).toEqual({value: ' ', scopes: pi});
    expect(tokens[9]).toEqual({value: 'encoding', scopes: [...attr, 'entity.other.attribute-name.html']});
    expect(tokens[10]).toEqual({value: '=', scopes: [...attr, 'punctuation.separator.key-value.html']});
    expect(tokens[11]).toEqual({value: '"', scopes: [...str, 'punctuation.definition.string.begin.html']});
    expect(tokens[12]).toEqual({value: 'UTF-8', scopes: str});
    expect(tokens[13]).toEqual({value: '"', scopes: [...str, 'punctuation.definition.string.end.html']});
    expect(tokens[14]).toEqual({value: '?>', scopes: [...pi, 'punctuation.definition.tag.html']});
  });
});
describe('snippets', function() {
// Main module of the `snippets` package, loaded fresh in each beforeEach.
let snippetsModule = null;
beforeEach(function() {
// FIXME: This should just be atom.packages.loadPackage('snippets'),
// but a bug in PackageManager::resolvePackagePath where it finds language-html's
// `snippets` directory before the actual package necessitates passing an absolute path
// See https://github.com/atom/atom/issues/15953
const snippetsPath = path.join(atom.packages.resourcePath, 'node_modules', 'snippets');
snippetsModule = require(atom.packages.loadPackage(snippetsPath).getMainModulePath());
// Disable loading of user snippets before the package is activated
spyOn(snippetsModule, 'loadUserSnippets').andCallFake(callback => callback({}));
snippetsModule.activate();
waitsFor('snippets to load', done => snippetsModule.onDidLoadSnippets(done));
});
// The plain HTML scope should offer a healthy number of snippets...
it('suggests snippets', () => expect(Object.keys(snippetsModule.parsedSnippetsForScopes(['.text.html'])).length).toBeGreaterThan(10));
// ...but none of them should leak into embedded JavaScript.
it('does not suggest any HTML snippets when in embedded scripts', () => expect(Object.keys(snippetsModule.parsedSnippetsForScopes(['.text.html .source.js.embedded.html'])).length).toBe(0));
});
});

View File

@ -1,92 +0,0 @@
# NOTE(review): the leading indentation of this CoffeeScript spec was stripped
# (indentation is syntactically significant in CoffeeScript, so the flattened
# text could not parse). Block structure restored; statement text unchanged.
path = require 'path'

describe 'Hyperlink grammar', ->
  grammar = null

  beforeEach ->
    waitsForPromise ->
      atom.packages.activatePackage('language-hyperlink')

    runs ->
      grammar = atom.grammars.grammarForScopeName('text.hyperlink')

  it 'parses the grammar', ->
    expect(grammar).toBeTruthy()
    expect(grammar.scopeName).toBe 'text.hyperlink'

  it 'parses http: and https: links', ->
    plainGrammar = atom.grammars.selectGrammar()

    {tokens} = plainGrammar.tokenizeLine 'http://github.com'
    expect(tokens[0]).toEqual value: 'http://github.com', scopes: ['text.plain.null-grammar', 'markup.underline.link.http.hyperlink']

    {tokens} = plainGrammar.tokenizeLine 'https://github.com'
    expect(tokens[0]).toEqual value: 'https://github.com', scopes: ['text.plain.null-grammar', 'markup.underline.link.https.hyperlink']

    {tokens} = plainGrammar.tokenizeLine 'http://twitter.com/#!/AtomEditor'
    expect(tokens[0]).toEqual value: 'http://twitter.com/#!/AtomEditor', scopes: ['text.plain.null-grammar', 'markup.underline.link.http.hyperlink']

    {tokens} = plainGrammar.tokenizeLine 'https://github.com/atom/brightray_example'
    expect(tokens[0]).toEqual value: 'https://github.com/atom/brightray_example', scopes: ['text.plain.null-grammar', 'markup.underline.link.https.hyperlink']

  it 'parses http: and https: links that contains unicode characters', ->
    plainGrammar = atom.grammars.selectGrammar()

    {tokens} = plainGrammar.tokenizeLine 'https://sv.wikipedia.org/wiki/Mañana'
    expect(tokens[0]).toEqual value: 'https://sv.wikipedia.org/wiki/Mañana', scopes: ['text.plain.null-grammar', 'markup.underline.link.https.hyperlink']

  it 'parses other links', ->
    plainGrammar = atom.grammars.selectGrammar()

    {tokens} = plainGrammar.tokenizeLine 'mailto:noreply@example.com'
    expect(tokens[0]).toEqual value: 'mailto:noreply@example.com', scopes: ['text.plain.null-grammar', 'markup.underline.link.mailto.hyperlink']

    {tokens} = plainGrammar.tokenizeLine 'x-man-page://tar'
    expect(tokens[0]).toEqual value: 'x-man-page://tar', scopes: ['text.plain.null-grammar', 'markup.underline.link.x-man-page.hyperlink']

    {tokens} = plainGrammar.tokenizeLine 'atom://core/open/file?filename=urlEncodedFileName&line=n&column=n'
    expect(tokens[0]).toEqual value: 'atom://core/open/file?filename=urlEncodedFileName&line=n&column=n', scopes: ['text.plain.null-grammar', 'markup.underline.link.atom.hyperlink']

  it 'does not parse links in a regex string', ->
    testGrammar = atom.grammars.loadGrammarSync(path.join(__dirname, 'fixtures', 'test-grammar.cson'))

    {tokens} = testGrammar.tokenizeLine 'regexp:http://github.com'
    expect(tokens[1]).toEqual value: 'http://github.com', scopes: ['source.test', 'string.regexp.test']

  describe 'parsing PHP strings', ->
    it 'does not parse links in a regex string', ->
      # PHP is unique in that its root scope is `text.html.php`, meaning that even though
      # `string - string.regexp` won't match in a regex string, `text` still will.
      # This is the reason the injection selector is `text - string.regexp` instead.
      # https://github.com/atom/language-php/issues/219
      waitsForPromise ->
        atom.packages.activatePackage('language-php')

      runs ->
        phpGrammar = atom.grammars.grammarForScopeName('text.html.php')
        {tokens} = phpGrammar.tokenizeLine '<?php "/mailto:/" ?>'
        expect(tokens[3]).toEqual value: 'mailto:', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'string.regexp.double-quoted.php']

  describe 'parsing cfml strings', ->
    it 'does not include anything between (and including) pound signs', ->
      plainGrammar = atom.grammars.selectGrammar()

      {tokens} = plainGrammar.tokenizeLine 'http://github.com/#username#'
      expect(tokens[0]).toEqual value: 'http://github.com/', scopes: ['text.plain.null-grammar', 'markup.underline.link.http.hyperlink']

    it 'still includes single pound signs', ->
      plainGrammar = atom.grammars.selectGrammar()

      {tokens} = plainGrammar.tokenizeLine 'http://github.com/atom/#start-of-content'
      expect(tokens[0]).toEqual value: 'http://github.com/atom/#start-of-content', scopes: ['text.plain.null-grammar', 'markup.underline.link.http.hyperlink']

  describe 'parsing matching parentheses', ->
    it 'still includes matching parentheses', ->
      plainGrammar = atom.grammars.selectGrammar()

      {tokens} = plainGrammar.tokenizeLine 'https://en.wikipedia.org/wiki/Atom_(text_editor)'
      expect(tokens[0]).toEqual value: 'https://en.wikipedia.org/wiki/Atom_(text_editor)', scopes: ['text.plain.null-grammar', 'markup.underline.link.https.hyperlink']

    it 'does not include wrapping parentheses', ->
      plainGrammar = atom.grammars.selectGrammar()

      {tokens} = plainGrammar.tokenizeLine '(https://en.wikipedia.org/wiki/Atom_(text_editor))'
      expect(tokens[1]).toEqual value: 'https://en.wikipedia.org/wiki/Atom_(text_editor)', scopes: ['text.plain.null-grammar', 'markup.underline.link.https.hyperlink']

View File

@ -0,0 +1,103 @@
const path = require('path');
describe('Hyperlink grammar', function() {
let grammar = null;
beforeEach(function() {
waitsForPromise(() => atom.packages.activatePackage('language-hyperlink'));
runs(() => grammar = atom.grammars.grammarForScopeName('text.hyperlink'));
});
it('parses the grammar', function() {
// Sanity check: package activation in beforeEach must register the grammar.
expect(grammar).toBeTruthy();
expect(grammar.scopeName).toBe('text.hyperlink');
});
it('parses http: and https: links', function() {
  const plainGrammar = atom.grammars.selectGrammar();
  // Each URL should be captured whole, with the URI scheme echoed in the scope.
  const cases = [
    ['http://github.com', 'http'],
    ['https://github.com', 'https'],
    ['http://twitter.com/#!/AtomEditor', 'http'],
    ['https://github.com/atom/brightray_example', 'https']
  ];
  for (const [url, scheme] of cases) {
    const {tokens} = plainGrammar.tokenizeLine(url);
    expect(tokens[0]).toEqual({value: url, scopes: ['text.plain.null-grammar', `markup.underline.link.${scheme}.hyperlink`]});
  }
});
it('parses http: and https: links that contains unicode characters', function() {
  // Non-ASCII path segments are part of the link token.
  const url = 'https://sv.wikipedia.org/wiki/Mañana';
  const {tokens} = atom.grammars.selectGrammar().tokenizeLine(url);
  expect(tokens[0]).toEqual({value: url, scopes: ['text.plain.null-grammar', 'markup.underline.link.https.hyperlink']});
});
it('parses other links', function() {
const plainGrammar = atom.grammars.selectGrammar();
let {tokens} = plainGrammar.tokenizeLine('mailto:noreply@example.com');
expect(tokens[0]).toEqual({value: 'mailto:noreply@example.com', scopes: ['text.plain.null-grammar', 'markup.underline.link.mailto.hyperlink']});
({tokens} = plainGrammar.tokenizeLine('x-man-page://tar'));
expect(tokens[0]).toEqual({value: 'x-man-page://tar', scopes: ['text.plain.null-grammar', 'markup.underline.link.x-man-page.hyperlink']});
({tokens} = plainGrammar.tokenizeLine('atom://core/open/file?filename=urlEncodedFileName&line=n&column=n'));
expect(tokens[0]).toEqual({value: 'atom://core/open/file?filename=urlEncodedFileName&line=n&column=n', scopes: ['text.plain.null-grammar', 'markup.underline.link.atom.hyperlink']});
});
it('does not parse links in a regex string', function() {
const testGrammar = atom.grammars.loadGrammarSync(path.join(__dirname, 'fixtures', 'test-grammar.cson'));
const {tokens} = testGrammar.tokenizeLine('regexp:http://github.com');
expect(tokens[1]).toEqual({value: 'http://github.com', scopes: ['source.test', 'string.regexp.test']});
});
describe('parsing PHP strings', () => it('does not parse links in a regex string', function() {
// PHP is unique in that its root scope is `text.html.php`, meaning that even though
// `string - string.regexp` won't match in a regex string, `text` still will.
// This is the reason the injection selector is `text - string.regexp` instead.
// https://github.com/atom/language-php/issues/219
waitsForPromise(() => atom.packages.activatePackage('language-php'));
runs(function() {
const phpGrammar = atom.grammars.grammarForScopeName('text.html.php');
const {tokens} = phpGrammar.tokenizeLine('<?php "/mailto:/" ?>');
expect(tokens[3]).toEqual({value: 'mailto:', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'string.regexp.double-quoted.php']});});
}));
describe('parsing cfml strings', function() {
it('does not include anything between (and including) pound signs', function() {
const plainGrammar = atom.grammars.selectGrammar();
const {tokens} = plainGrammar.tokenizeLine('http://github.com/#username#');
expect(tokens[0]).toEqual({value: 'http://github.com/', scopes: ['text.plain.null-grammar', 'markup.underline.link.http.hyperlink']});
});
it('still includes single pound signs', function() {
const plainGrammar = atom.grammars.selectGrammar();
const {tokens} = plainGrammar.tokenizeLine('http://github.com/atom/#start-of-content');
expect(tokens[0]).toEqual({value: 'http://github.com/atom/#start-of-content', scopes: ['text.plain.null-grammar', 'markup.underline.link.http.hyperlink']});
});
});
describe('parsing matching parentheses', function() {
it('still includes matching parentheses', function() {
const plainGrammar = atom.grammars.selectGrammar();
const {tokens} = plainGrammar.tokenizeLine('https://en.wikipedia.org/wiki/Atom_(text_editor)');
expect(tokens[0]).toEqual({value: 'https://en.wikipedia.org/wiki/Atom_(text_editor)', scopes: ['text.plain.null-grammar', 'markup.underline.link.https.hyperlink']});
});
it('does not include wrapping parentheses', function() {
const plainGrammar = atom.grammars.selectGrammar();
const {tokens} = plainGrammar.tokenizeLine('(https://en.wikipedia.org/wiki/Atom_(text_editor))');
expect(tokens[1]).toEqual({value: 'https://en.wikipedia.org/wiki/Atom_(text_editor)', scopes: ['text.plain.null-grammar', 'markup.underline.link.https.hyperlink']});
});
});
});

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,190 +0,0 @@
# Spec for the Java Unified Expression Language (EL) grammar
# (`source.java.el`, registered by the `language-java` package).
# NOTE(review): this CoffeeScript's indentation appears to have been lost in
# this rendering; the code tokens themselves are kept byte-identical.
describe 'Unified expression language grammar', ->
grammar = null
# Activate `language-java` and fetch the EL sub-grammar before each spec.
# Tree-sitter is disabled because these specs exercise the TextMate grammar.
beforeEach ->
atom.config.set 'core.useTreeSitterParsers', false
waitsForPromise ->
atom.packages.activatePackage('language-java')
runs ->
grammar = atom.grammars.grammarForScopeName('source.java.el')
it 'parses the grammar', ->
expect(grammar).toBeTruthy()
expect(grammar.scopeName).toBe 'source.java.el'
# Operator scoping: ternary, comparison, `empty`, arithmetic, logical.
describe 'operators', ->
it 'tokenizes the ternary operator', ->
{tokens} = grammar.tokenizeLine 'true ? 0 : 1'
expect(tokens[2]).toEqual value: '?', scopes: ['source.java.el', 'keyword.control.ternary.java.el']
expect(tokens[6]).toEqual value: ':', scopes: ['source.java.el', 'keyword.control.ternary.java.el']
it 'parses the comparison operator `==`', ->
{tokens} = grammar.tokenizeLine '1 == 1'
expect(tokens[2]).toEqual value: '==', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']
it 'parses the comparison operator `!=`', ->
{tokens} = grammar.tokenizeLine '1 != 1'
expect(tokens[2]).toEqual value: '!=', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']
it 'parses the comparison operator `<=`', ->
{tokens} = grammar.tokenizeLine '1 <= 1'
expect(tokens[2]).toEqual value: '<=', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']
it 'parses the comparison operator `>=`', ->
{tokens} = grammar.tokenizeLine '1 >= 1'
expect(tokens[2]).toEqual value: '>=', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']
it 'parses the comparison operator `<`', ->
{tokens} = grammar.tokenizeLine '1 < 1'
expect(tokens[2]).toEqual value: '<', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']
it 'parses the comparison operator `>`', ->
{tokens} = grammar.tokenizeLine '1 > 1'
expect(tokens[2]).toEqual value: '>', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']
it 'parses the comparison operator `eq`', ->
{tokens} = grammar.tokenizeLine '1 eq 1'
expect(tokens[2]).toEqual value: 'eq', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']
it 'parses the comparison operator `ne`', ->
{tokens} = grammar.tokenizeLine '1 ne 1'
expect(tokens[2]).toEqual value: 'ne', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']
it 'parses the comparison operator `le`', ->
{tokens} = grammar.tokenizeLine '1 le 1'
expect(tokens[2]).toEqual value: 'le', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']
it 'parses the comparison operator `gt`', ->
{tokens} = grammar.tokenizeLine '1 gt 1'
expect(tokens[2]).toEqual value: 'gt', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']
it 'parses the comparison operator `lt`', ->
{tokens} = grammar.tokenizeLine '1 lt 1'
expect(tokens[2]).toEqual value: 'lt', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']
# NOTE(review): the next spec duplicates the `gt` spec above verbatim — it was
# possibly meant to cover `ge`, which no spec here exercises; confirm against
# the grammar before changing it.
it 'parses the comparison operator `gt`', ->
{tokens} = grammar.tokenizeLine '1 gt 1'
expect(tokens[2]).toEqual value: 'gt', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']
it 'parses the empty operators', ->
{tokens} = grammar.tokenizeLine 'empty foo'
expect(tokens[0]).toEqual value: 'empty', scopes: ['source.java.el', 'keyword.operator.empty.java.el']
it 'parses the arithmetic operator `-`', ->
{tokens} = grammar.tokenizeLine '1 - 1'
expect(tokens[2]).toEqual value: '-', scopes: ['source.java.el', 'keyword.operator.arithmetic.java.el']
it 'parses the arithmetic operator `+`', ->
{tokens} = grammar.tokenizeLine '1 + 1'
expect(tokens[2]).toEqual value: '+', scopes: ['source.java.el', 'keyword.operator.arithmetic.java.el']
it 'parses the arithmetic operator `*`', ->
{tokens} = grammar.tokenizeLine '1 * 1'
expect(tokens[2]).toEqual value: '*', scopes: ['source.java.el', 'keyword.operator.arithmetic.java.el']
it 'parses the arithmetic operator `/`', ->
{tokens} = grammar.tokenizeLine '1 / 1'
expect(tokens[2]).toEqual value: '/', scopes: ['source.java.el', 'keyword.operator.arithmetic.java.el']
it 'parses the arithmetic operator `%`', ->
{tokens} = grammar.tokenizeLine '1 % 1'
expect(tokens[2]).toEqual value: '%', scopes: ['source.java.el', 'keyword.operator.arithmetic.java.el']
it 'parses the arithmetic operator `div`', ->
{tokens} = grammar.tokenizeLine '1 div 1'
expect(tokens[2]).toEqual value: 'div', scopes: ['source.java.el', 'keyword.operator.arithmetic.java.el']
it 'parses the arithmetic operator `mod`', ->
{tokens} = grammar.tokenizeLine '1 mod 1'
expect(tokens[2]).toEqual value: 'mod', scopes: ['source.java.el', 'keyword.operator.arithmetic.java.el']
it 'parses the logical operator `!`', ->
{tokens} = grammar.tokenizeLine '!foo'
expect(tokens[0]).toEqual value: '!', scopes: ['source.java.el', 'keyword.operator.logical.java.el']
it 'parses the logical operator `&&`', ->
{tokens} = grammar.tokenizeLine '1 && 1'
expect(tokens[2]).toEqual value: '&&', scopes: ['source.java.el', 'keyword.operator.logical.java.el']
it 'parses the logical operator `||`', ->
{tokens} = grammar.tokenizeLine '1 || 1'
expect(tokens[2]).toEqual value: '||', scopes: ['source.java.el', 'keyword.operator.logical.java.el']
it 'parses the logical operator `not`', ->
{tokens} = grammar.tokenizeLine '1 not 1'
expect(tokens[2]).toEqual value: 'not', scopes: ['source.java.el', 'keyword.operator.logical.java.el']
it 'parses the logical operator `and`', ->
{tokens} = grammar.tokenizeLine '1 and 1'
expect(tokens[2]).toEqual value: 'and', scopes: ['source.java.el', 'keyword.operator.logical.java.el']
it 'parses the logical operator `or`', ->
{tokens} = grammar.tokenizeLine '1 or 1'
expect(tokens[2]).toEqual value: 'or', scopes: ['source.java.el', 'keyword.operator.logical.java.el']
# Literal scoping: booleans, null, numbers, quoted strings with escapes.
describe 'literals', ->
it 'parses boolean literals', ->
{tokens} = grammar.tokenizeLine 'true'
expect(tokens[0]).toEqual value: 'true', scopes: ['source.java.el', 'constant.boolean.java.el']
{tokens} = grammar.tokenizeLine 'false'
expect(tokens[0]).toEqual value: 'false', scopes: ['source.java.el', 'constant.boolean.java.el']
# NOTE(review): the null-literal spec below makes no assertion — it only
# verifies that tokenizing 'null' does not throw.
it 'parses the null literal', ->
{tokens} = grammar.tokenizeLine 'null'
it 'parses numeric literals', ->
{tokens} = grammar.tokenizeLine '0'
expect(tokens[0]).toEqual value: '0', scopes: ['source.java.el', 'constant.numeric.java.el']
{tokens} = grammar.tokenizeLine '9804'
expect(tokens[0]).toEqual value: '9804', scopes: ['source.java.el', 'constant.numeric.java.el']
{tokens} = grammar.tokenizeLine '0.54'
expect(tokens[0]).toEqual value: '0.54', scopes: ['source.java.el', 'constant.numeric.java.el']
{tokens} = grammar.tokenizeLine '13.12'
expect(tokens[0]).toEqual value: '13.12', scopes: ['source.java.el', 'constant.numeric.java.el']
it 'tokenizes single quoted string literals', ->
{tokens} = grammar.tokenizeLine "'foo\\n bar \\\'baz'"
expect(tokens[0]).toEqual value: "'", scopes: ['source.java.el', 'string.quoted.single.java.el', 'punctuation.definition.string.begin.java.el']
expect(tokens[1]).toEqual value: 'foo', scopes: ['source.java.el', 'string.quoted.single.java.el']
expect(tokens[2]).toEqual value: '\\n', scopes: ['source.java.el', 'string.quoted.single.java.el', 'constant.character.escape.java.el']
expect(tokens[3]).toEqual value: ' bar ', scopes: ['source.java.el', 'string.quoted.single.java.el']
expect(tokens[4]).toEqual value: '\\\'', scopes: ['source.java.el', 'string.quoted.single.java.el', 'constant.character.escape.java.el']
expect(tokens[5]).toEqual value: 'baz', scopes: ['source.java.el', 'string.quoted.single.java.el']
expect(tokens[6]).toEqual value: "'", scopes: ['source.java.el', 'string.quoted.single.java.el', 'punctuation.definition.string.end.java.el']
it 'tokenizes double quoted string literals', ->
{tokens} = grammar.tokenizeLine '"foo\\n bar \\\"baz"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.java.el', 'string.quoted.double.java.el', 'punctuation.definition.string.begin.java.el']
expect(tokens[1]).toEqual value: 'foo', scopes: ['source.java.el', 'string.quoted.double.java.el']
expect(tokens[2]).toEqual value: '\\n', scopes: ['source.java.el', 'string.quoted.double.java.el', 'constant.character.escape.java.el']
expect(tokens[3]).toEqual value: ' bar ', scopes: ['source.java.el', 'string.quoted.double.java.el']
expect(tokens[4]).toEqual value: '\\\"', scopes: ['source.java.el', 'string.quoted.double.java.el', 'constant.character.escape.java.el']
expect(tokens[5]).toEqual value: 'baz', scopes: ['source.java.el', 'string.quoted.double.java.el']
expect(tokens[6]).toEqual value: '"', scopes: ['source.java.el', 'string.quoted.double.java.el', 'punctuation.definition.string.end.java.el']
# Namespaced function calls: `fn:split(...)` → namespace + separator + braces.
it 'tokenizes function calls', ->
{tokens} = grammar.tokenizeLine 'fn:split(foo, bar)'
expect(tokens[0]).toEqual value: 'fn', scopes: ['source.java.el', 'namespace.java.el']
expect(tokens[1]).toEqual value: ':', scopes: ['source.java.el', 'namespace.java.el', 'punctuation.separator.namespace.java.el']
expect(tokens[2]).toEqual value: 'split', scopes: ['source.java.el']
expect(tokens[3]).toEqual value: '(', scopes: ['source.java.el', 'meta.brace.round.java.el']
expect(tokens[4]).toEqual value: 'foo', scopes: ['source.java.el']
expect(tokens[5]).toEqual value: ',', scopes: ['source.java.el', 'meta.delimiter.java.el']
expect(tokens[6]).toEqual value: ' bar', scopes: ['source.java.el']
expect(tokens[7]).toEqual value: ')', scopes: ['source.java.el', 'meta.brace.round.java.el']
it 'tokenizes a computed property access', ->
{tokens} = grammar.tokenizeLine 'foo[0]'
expect(tokens[0]).toEqual value: 'foo', scopes: ['source.java.el']
expect(tokens[1]).toEqual value: '[', scopes: ['source.java.el', 'meta.brace.square.java.el']
expect(tokens[2]).toEqual value: '0', scopes: ['source.java.el', 'constant.numeric.java.el']
expect(tokens[3]).toEqual value: ']', scopes: ['source.java.el', 'meta.brace.square.java.el']

View File

@ -0,0 +1,229 @@
// Specs for the Java Unified Expression Language (EL) grammar
// (`source.java.el`, registered by the `language-java` package).
// Covers operator, literal, function-call and property-access tokenization.
describe('Unified expression language grammar', function() {
  let grammar = null;

  beforeEach(function() {
    // These specs exercise the TextMate grammar, so Tree-sitter is disabled.
    atom.config.set('core.useTreeSitterParsers', false);
    waitsForPromise(() => atom.packages.activatePackage('language-java'));
    runs(() => grammar = atom.grammars.grammarForScopeName('source.java.el'));
  });

  it('parses the grammar', function() {
    expect(grammar).toBeTruthy();
    expect(grammar.scopeName).toBe('source.java.el');
  });

  describe('operators', function() {
    it('tokenizes the ternary operator', function() {
      const {tokens} = grammar.tokenizeLine('true ? 0 : 1');
      expect(tokens[2]).toEqual({value: '?', scopes: ['source.java.el', 'keyword.control.ternary.java.el']});
      expect(tokens[6]).toEqual({value: ':', scopes: ['source.java.el', 'keyword.control.ternary.java.el']});
    });

    it('parses the comparison operator `==`', function() {
      const {tokens} = grammar.tokenizeLine('1 == 1');
      expect(tokens[2]).toEqual({value: '==', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']});
    });

    it('parses the comparison operator `!=`', function() {
      const {tokens} = grammar.tokenizeLine('1 != 1');
      expect(tokens[2]).toEqual({value: '!=', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']});
    });

    it('parses the comparison operator `<=`', function() {
      const {tokens} = grammar.tokenizeLine('1 <= 1');
      expect(tokens[2]).toEqual({value: '<=', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']});
    });

    it('parses the comparison operator `>=`', function() {
      const {tokens} = grammar.tokenizeLine('1 >= 1');
      expect(tokens[2]).toEqual({value: '>=', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']});
    });

    it('parses the comparison operator `<`', function() {
      const {tokens} = grammar.tokenizeLine('1 < 1');
      expect(tokens[2]).toEqual({value: '<', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']});
    });

    it('parses the comparison operator `>`', function() {
      const {tokens} = grammar.tokenizeLine('1 > 1');
      expect(tokens[2]).toEqual({value: '>', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']});
    });

    it('parses the comparison operator `eq`', function() {
      const {tokens} = grammar.tokenizeLine('1 eq 1');
      expect(tokens[2]).toEqual({value: 'eq', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']});
    });

    it('parses the comparison operator `ne`', function() {
      const {tokens} = grammar.tokenizeLine('1 ne 1');
      expect(tokens[2]).toEqual({value: 'ne', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']});
    });

    it('parses the comparison operator `le`', function() {
      const {tokens} = grammar.tokenizeLine('1 le 1');
      expect(tokens[2]).toEqual({value: 'le', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']});
    });

    it('parses the comparison operator `gt`', function() {
      const {tokens} = grammar.tokenizeLine('1 gt 1');
      expect(tokens[2]).toEqual({value: 'gt', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']});
    });

    it('parses the comparison operator `lt`', function() {
      const {tokens} = grammar.tokenizeLine('1 lt 1');
      expect(tokens[2]).toEqual({value: 'lt', scopes: ['source.java.el', 'keyword.operator.comparison.java.el']});
    });

    // (A second, byte-identical `gt` spec was removed here — it duplicated
    // the one above. The `ge` operator is still not covered by any spec.)

    it('parses the empty operators', function() {
      const {tokens} = grammar.tokenizeLine('empty foo');
      expect(tokens[0]).toEqual({value: 'empty', scopes: ['source.java.el', 'keyword.operator.empty.java.el']});
    });

    it('parses the arithmetic operator `-`', function() {
      const {tokens} = grammar.tokenizeLine('1 - 1');
      expect(tokens[2]).toEqual({value: '-', scopes: ['source.java.el', 'keyword.operator.arithmetic.java.el']});
    });

    it('parses the arithmetic operator `+`', function() {
      const {tokens} = grammar.tokenizeLine('1 + 1');
      expect(tokens[2]).toEqual({value: '+', scopes: ['source.java.el', 'keyword.operator.arithmetic.java.el']});
    });

    it('parses the arithmetic operator `*`', function() {
      const {tokens} = grammar.tokenizeLine('1 * 1');
      expect(tokens[2]).toEqual({value: '*', scopes: ['source.java.el', 'keyword.operator.arithmetic.java.el']});
    });

    it('parses the arithmetic operator `/`', function() {
      const {tokens} = grammar.tokenizeLine('1 / 1');
      expect(tokens[2]).toEqual({value: '/', scopes: ['source.java.el', 'keyword.operator.arithmetic.java.el']});
    });

    it('parses the arithmetic operator `%`', function() {
      const {tokens} = grammar.tokenizeLine('1 % 1');
      expect(tokens[2]).toEqual({value: '%', scopes: ['source.java.el', 'keyword.operator.arithmetic.java.el']});
    });

    it('parses the arithmetic operator `div`', function() {
      const {tokens} = grammar.tokenizeLine('1 div 1');
      expect(tokens[2]).toEqual({value: 'div', scopes: ['source.java.el', 'keyword.operator.arithmetic.java.el']});
    });

    it('parses the arithmetic operator `mod`', function() {
      const {tokens} = grammar.tokenizeLine('1 mod 1');
      expect(tokens[2]).toEqual({value: 'mod', scopes: ['source.java.el', 'keyword.operator.arithmetic.java.el']});
    });

    it('parses the logical operator `!`', function() {
      const {tokens} = grammar.tokenizeLine('!foo');
      expect(tokens[0]).toEqual({value: '!', scopes: ['source.java.el', 'keyword.operator.logical.java.el']});
    });

    it('parses the logical operator `&&`', function() {
      const {tokens} = grammar.tokenizeLine('1 && 1');
      expect(tokens[2]).toEqual({value: '&&', scopes: ['source.java.el', 'keyword.operator.logical.java.el']});
    });

    it('parses the logical operator `||`', function() {
      const {tokens} = grammar.tokenizeLine('1 || 1');
      expect(tokens[2]).toEqual({value: '||', scopes: ['source.java.el', 'keyword.operator.logical.java.el']});
    });

    it('parses the logical operator `not`', function() {
      const {tokens} = grammar.tokenizeLine('1 not 1');
      expect(tokens[2]).toEqual({value: 'not', scopes: ['source.java.el', 'keyword.operator.logical.java.el']});
    });

    it('parses the logical operator `and`', function() {
      const {tokens} = grammar.tokenizeLine('1 and 1');
      expect(tokens[2]).toEqual({value: 'and', scopes: ['source.java.el', 'keyword.operator.logical.java.el']});
    });

    it('parses the logical operator `or`', function() {
      const {tokens} = grammar.tokenizeLine('1 or 1');
      expect(tokens[2]).toEqual({value: 'or', scopes: ['source.java.el', 'keyword.operator.logical.java.el']});
    });
  });

  describe('literals', function() {
    it('parses boolean literals', function() {
      let {tokens} = grammar.tokenizeLine('true');
      expect(tokens[0]).toEqual({value: 'true', scopes: ['source.java.el', 'constant.boolean.java.el']});
      ({tokens} = grammar.tokenizeLine('false'));
      expect(tokens[0]).toEqual({value: 'false', scopes: ['source.java.el', 'constant.boolean.java.el']});
    });

    it('parses the null literal', function() {
      // NOTE(review): this spec makes no assertion — it only verifies that
      // tokenizing 'null' does not throw. The expected scope for a null
      // literal is not established anywhere in this file; confirm against
      // the grammar before asserting one.
      let tokens;
      return ({tokens} = grammar.tokenizeLine('null'));
    });

    it('parses numeric literals', function() {
      let {tokens} = grammar.tokenizeLine('0');
      expect(tokens[0]).toEqual({value: '0', scopes: ['source.java.el', 'constant.numeric.java.el']});
      ({tokens} = grammar.tokenizeLine('9804'));
      expect(tokens[0]).toEqual({value: '9804', scopes: ['source.java.el', 'constant.numeric.java.el']});
      ({tokens} = grammar.tokenizeLine('0.54'));
      expect(tokens[0]).toEqual({value: '0.54', scopes: ['source.java.el', 'constant.numeric.java.el']});
      ({tokens} = grammar.tokenizeLine('13.12'));
      expect(tokens[0]).toEqual({value: '13.12', scopes: ['source.java.el', 'constant.numeric.java.el']});
    });

    it('tokenizes single quoted string literals', function() {
      const {tokens} = grammar.tokenizeLine("'foo\\n bar \\\'baz'");
      expect(tokens[0]).toEqual({value: "'", scopes: ['source.java.el', 'string.quoted.single.java.el', 'punctuation.definition.string.begin.java.el']});
      expect(tokens[1]).toEqual({value: 'foo', scopes: ['source.java.el', 'string.quoted.single.java.el']});
      expect(tokens[2]).toEqual({value: '\\n', scopes: ['source.java.el', 'string.quoted.single.java.el', 'constant.character.escape.java.el']});
      expect(tokens[3]).toEqual({value: ' bar ', scopes: ['source.java.el', 'string.quoted.single.java.el']});
      expect(tokens[4]).toEqual({value: '\\\'', scopes: ['source.java.el', 'string.quoted.single.java.el', 'constant.character.escape.java.el']});
      expect(tokens[5]).toEqual({value: 'baz', scopes: ['source.java.el', 'string.quoted.single.java.el']});
      expect(tokens[6]).toEqual({value: "'", scopes: ['source.java.el', 'string.quoted.single.java.el', 'punctuation.definition.string.end.java.el']});
    });

    it('tokenizes double quoted string literals', function() {
      const {tokens} = grammar.tokenizeLine('"foo\\n bar \\\"baz"');
      expect(tokens[0]).toEqual({value: '"', scopes: ['source.java.el', 'string.quoted.double.java.el', 'punctuation.definition.string.begin.java.el']});
      expect(tokens[1]).toEqual({value: 'foo', scopes: ['source.java.el', 'string.quoted.double.java.el']});
      expect(tokens[2]).toEqual({value: '\\n', scopes: ['source.java.el', 'string.quoted.double.java.el', 'constant.character.escape.java.el']});
      expect(tokens[3]).toEqual({value: ' bar ', scopes: ['source.java.el', 'string.quoted.double.java.el']});
      expect(tokens[4]).toEqual({value: '\\\"', scopes: ['source.java.el', 'string.quoted.double.java.el', 'constant.character.escape.java.el']});
      expect(tokens[5]).toEqual({value: 'baz', scopes: ['source.java.el', 'string.quoted.double.java.el']});
      expect(tokens[6]).toEqual({value: '"', scopes: ['source.java.el', 'string.quoted.double.java.el', 'punctuation.definition.string.end.java.el']});
    });
  });

  // NOTE(review): the two specs below sit outside the 'literals' describe in
  // the decaffeinated output; grouping only affects reporter output.
  it('tokenizes function calls', function() {
    const {tokens} = grammar.tokenizeLine('fn:split(foo, bar)');
    expect(tokens[0]).toEqual({value: 'fn', scopes: ['source.java.el', 'namespace.java.el']});
    expect(tokens[1]).toEqual({value: ':', scopes: ['source.java.el', 'namespace.java.el', 'punctuation.separator.namespace.java.el']});
    expect(tokens[2]).toEqual({value: 'split', scopes: ['source.java.el']});
    expect(tokens[3]).toEqual({value: '(', scopes: ['source.java.el', 'meta.brace.round.java.el']});
    expect(tokens[4]).toEqual({value: 'foo', scopes: ['source.java.el']});
    expect(tokens[5]).toEqual({value: ',', scopes: ['source.java.el', 'meta.delimiter.java.el']});
    expect(tokens[6]).toEqual({value: ' bar', scopes: ['source.java.el']});
    expect(tokens[7]).toEqual({value: ')', scopes: ['source.java.el', 'meta.brace.round.java.el']});
  });

  it('tokenizes a computed property access', function() {
    const {tokens} = grammar.tokenizeLine('foo[0]');
    expect(tokens[0]).toEqual({value: 'foo', scopes: ['source.java.el']});
    expect(tokens[1]).toEqual({value: '[', scopes: ['source.java.el', 'meta.brace.square.java.el']});
    expect(tokens[2]).toEqual({value: '0', scopes: ['source.java.el', 'constant.numeric.java.el']});
    expect(tokens[3]).toEqual({value: ']', scopes: ['source.java.el', 'meta.brace.square.java.el']});
  });
});

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,88 +0,0 @@
# Spec for the JavaScript regular-expression *replacement* grammar
# (`source.js.regexp.replacement`) — the `$1` / `$&` / `$$` placeholder
# mini-language used in replacement strings.
# NOTE(review): this CoffeeScript's indentation appears to have been lost in
# this rendering; the code tokens themselves are kept byte-identical.
describe "Regular Expression Replacement grammar", ->
grammar = null
# Activate `language-javascript` and grab the replacement grammar before each
# spec; Tree-sitter is disabled so the TextMate grammar is exercised.
beforeEach ->
atom.config.set 'core.useTreeSitterParsers', false
waitsForPromise ->
atom.packages.activatePackage("language-javascript")
runs ->
grammar = atom.grammars.grammarForScopeName("source.js.regexp.replacement")
it "parses the grammar", ->
expect(grammar).toBeTruthy()
expect(grammar.scopeName).toBe "source.js.regexp.replacement"
it "parses the grammar", ->
expect(grammar).toBeTruthy()
expect(grammar.scopeName).toBe "source.js.regexp.replacement"
describe "basic strings", ->
it "tokenizes with no extra scopes", ->
{tokens} = grammar.tokenizeLine('Hello [world]. (hi to you)')
expect(tokens[0]).toEqual value: 'Hello [world]. (hi to you)', scopes: ['source.js.regexp.replacement']
describe "escaped characters", ->
it "tokenizes with as an escape character", ->
{tokens} = grammar.tokenizeLine('\\n')
expect(tokens[0]).toEqual value: '\\n', scopes: ['source.js.regexp.replacement', 'constant.character.escape.backslash.regexp.replacement']
it "tokenizes '$$' as an escaped '$' character", ->
{tokens} = grammar.tokenizeLine('$$')
expect(tokens[0]).toEqual value: '$$', scopes: ['source.js.regexp.replacement', 'constant.character.escape.dollar.regexp.replacement']
it "doesn't treat '\\$' as an escaped '$' character", ->
{tokens} = grammar.tokenizeLine('\\$')
expect(tokens[0]).toEqual value: '\\$', scopes: ['source.js.regexp.replacement']
it "tokenizes '$$1' as an escaped '$' character followed by a '1' character", ->
{tokens} = grammar.tokenizeLine('$$1')
expect(tokens[0]).toEqual value: '$$', scopes: ['source.js.regexp.replacement', 'constant.character.escape.dollar.regexp.replacement']
expect(tokens[1]).toEqual value: '1', scopes: ['source.js.regexp.replacement']
# $1–$99 (optionally zero-padded) are capture-group variables; $0/$00 and any
# third digit are plain text.
describe "Numeric placeholders", ->
it "doesn't tokenize $0 as a variable", ->
{tokens} = grammar.tokenizeLine('$0')
expect(tokens[0]).toEqual value: '$0', scopes: ['source.js.regexp.replacement']
it "doesn't tokenize $00 as a variable", ->
{tokens} = grammar.tokenizeLine('$00')
expect(tokens[0]).toEqual value: '$00', scopes: ['source.js.regexp.replacement']
it "tokenizes $1 as a variable", ->
{tokens} = grammar.tokenizeLine('$1')
expect(tokens[0]).toEqual value: '$1', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']
it "tokenizes $01 as a variable", ->
{tokens} = grammar.tokenizeLine('$01')
expect(tokens[0]).toEqual value: '$01', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']
it "tokenizes $3 as a variable", ->
{tokens} = grammar.tokenizeLine('$3')
expect(tokens[0]).toEqual value: '$3', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']
it "tokenizes $10 as a variable", ->
{tokens} = grammar.tokenizeLine('$10')
expect(tokens[0]).toEqual value: '$10', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']
it "tokenizes $99 as a variable", ->
{tokens} = grammar.tokenizeLine('$99')
expect(tokens[0]).toEqual value: '$99', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']
# NOTE(review): "numberic" below is a typo ("numeric") in the spec title.
it "doesn't tokenize the third numberic character in '$100' as a variable", ->
{tokens} = grammar.tokenizeLine('$100')
expect(tokens[0]).toEqual value: '$10', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']
expect(tokens[1]).toEqual value: '0', scopes: ['source.js.regexp.replacement']
describe "Matched sub-string placeholder", ->
it "tokenizes $& as a variable", ->
{tokens} = grammar.tokenizeLine('$&')
expect(tokens[0]).toEqual value: '$&', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']
# NOTE(review): "Preceeding" below is a typo ("Preceding") in the spec title.
describe "Preceeding portion placeholder", ->
it "tokenizes $` as a variable", ->
{tokens} = grammar.tokenizeLine('$`')
expect(tokens[0]).toEqual value: '$`', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']
describe "Following portion placeholder", ->
it "tokenizes $' as a variable", ->
{tokens} = grammar.tokenizeLine('$\'')
expect(tokens[0]).toEqual value: '$\'', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']

View File

@ -0,0 +1,104 @@
// Specs for the JavaScript regular-expression *replacement* grammar
// (`source.js.regexp.replacement`) — the `$1` / `$&` / `$$` placeholder
// mini-language used in replacement strings.
describe("Regular Expression Replacement grammar", function() {
  let grammar = null;

  beforeEach(function() {
    // These specs exercise the TextMate grammar, so Tree-sitter is disabled.
    atom.config.set('core.useTreeSitterParsers', false);
    waitsForPromise(() => atom.packages.activatePackage("language-javascript"));
    runs(() => grammar = atom.grammars.grammarForScopeName("source.js.regexp.replacement"));
  });

  it("parses the grammar", function() {
    expect(grammar).toBeTruthy();
    expect(grammar.scopeName).toBe("source.js.regexp.replacement");
  });

  describe("basic strings", () => it("tokenizes with no extra scopes", function() {
    const {tokens} = grammar.tokenizeLine('Hello [world]. (hi to you)');
    expect(tokens[0]).toEqual({value: 'Hello [world]. (hi to you)', scopes: ['source.js.regexp.replacement']});
  }));

  describe("escaped characters", function() {
    it("tokenizes with as an escape character", function() {
      const {tokens} = grammar.tokenizeLine('\\n');
      expect(tokens[0]).toEqual({value: '\\n', scopes: ['source.js.regexp.replacement', 'constant.character.escape.backslash.regexp.replacement']});
    });

    it("tokenizes '$$' as an escaped '$' character", function() {
      const {tokens} = grammar.tokenizeLine('$$');
      expect(tokens[0]).toEqual({value: '$$', scopes: ['source.js.regexp.replacement', 'constant.character.escape.dollar.regexp.replacement']});
    });

    it("doesn't treat '\\$' as an escaped '$' character", function() {
      const {tokens} = grammar.tokenizeLine('\\$');
      expect(tokens[0]).toEqual({value: '\\$', scopes: ['source.js.regexp.replacement']});
    });

    it("tokenizes '$$1' as an escaped '$' character followed by a '1' character", function() {
      const {tokens} = grammar.tokenizeLine('$$1');
      expect(tokens[0]).toEqual({value: '$$', scopes: ['source.js.regexp.replacement', 'constant.character.escape.dollar.regexp.replacement']});
      expect(tokens[1]).toEqual({value: '1', scopes: ['source.js.regexp.replacement']});
    });
  });

  // $1–$99 (optionally zero-padded to two digits) are capture-group
  // variables; $0/$00 and any third digit are plain text.
  describe("Numeric placeholders", function() {
    it("doesn't tokenize $0 as a variable", function() {
      const {tokens} = grammar.tokenizeLine('$0');
      expect(tokens[0]).toEqual({value: '$0', scopes: ['source.js.regexp.replacement']});
    });

    it("doesn't tokenize $00 as a variable", function() {
      const {tokens} = grammar.tokenizeLine('$00');
      expect(tokens[0]).toEqual({value: '$00', scopes: ['source.js.regexp.replacement']});
    });

    it("tokenizes $1 as a variable", function() {
      const {tokens} = grammar.tokenizeLine('$1');
      expect(tokens[0]).toEqual({value: '$1', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']});
    });

    it("tokenizes $01 as a variable", function() {
      const {tokens} = grammar.tokenizeLine('$01');
      expect(tokens[0]).toEqual({value: '$01', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']});
    });

    it("tokenizes $3 as a variable", function() {
      const {tokens} = grammar.tokenizeLine('$3');
      expect(tokens[0]).toEqual({value: '$3', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']});
    });

    it("tokenizes $10 as a variable", function() {
      const {tokens} = grammar.tokenizeLine('$10');
      expect(tokens[0]).toEqual({value: '$10', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']});
    });

    it("tokenizes $99 as a variable", function() {
      const {tokens} = grammar.tokenizeLine('$99');
      expect(tokens[0]).toEqual({value: '$99', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']});
    });

    // Title typo fixed: "numberic" -> "numeric".
    it("doesn't tokenize the third numeric character in '$100' as a variable", function() {
      const {tokens} = grammar.tokenizeLine('$100');
      expect(tokens[0]).toEqual({value: '$10', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']});
      expect(tokens[1]).toEqual({value: '0', scopes: ['source.js.regexp.replacement']});
    });
  });

  // The decaffeinate conversion had nested the three describes below inside
  // "Numeric placeholders"; they are siblings of it in the original
  // CoffeeScript spec, so they are hoisted back to the top level here.
  // Title typo fixed below: "Preceeding" -> "Preceding".
  describe("Matched sub-string placeholder", () => it("tokenizes $& as a variable", function() {
    const {tokens} = grammar.tokenizeLine('$&');
    expect(tokens[0]).toEqual({value: '$&', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']});
  }));

  describe("Preceding portion placeholder", () => it("tokenizes $` as a variable", function() {
    const {tokens} = grammar.tokenizeLine('$`');
    expect(tokens[0]).toEqual({value: '$`', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']});
  }));

  describe("Following portion placeholder", () => it("tokenizes $' as a variable", function() {
    const {tokens} = grammar.tokenizeLine('$\'');
    expect(tokens[0]).toEqual({value: '$\'', scopes: ['source.js.regexp.replacement', 'variable.regexp.replacement']});
  }));
});

View File

@ -1,111 +0,0 @@
# Legacy TextMate-style tokenization spec for the `language-json` package:
# tokenizes sample JSON lines and asserts each token's text together with
# its full scope stack. (Indentation restored to conventional two spaces;
# the diff rendering had flattened it.)
describe "JSON grammar", ->
  grammar = null

  beforeEach ->
    # Force the first-mate (TextMate) grammar instead of tree-sitter.
    atom.config.set 'core.useTreeSitterParsers', false

    waitsForPromise ->
      atom.packages.activatePackage('language-json')

    runs ->
      grammar = atom.grammars.grammarForScopeName('source.json')

  it "parses the grammar", ->
    expect(grammar).toBeDefined()
    expect(grammar.scopeName).toBe 'source.json'

  it "tokenizes arrays", ->
    # Scope stacks shared by every token inside a JSON array.
    baseScopes = ['source.json', 'meta.structure.array.json']
    numericScopes = [baseScopes..., 'constant.numeric.json']
    separatorScopes = [baseScopes..., 'punctuation.separator.array.json']

    {tokens} = grammar.tokenizeLine('[1, 2, 3]')
    expect(tokens[0]).toEqual value: '[', scopes: [baseScopes..., 'punctuation.definition.array.begin.json']
    expect(tokens[1]).toEqual value: '1', scopes: numericScopes
    expect(tokens[2]).toEqual value: ',', scopes: separatorScopes
    expect(tokens[3]).toEqual value: ' ', scopes: baseScopes
    expect(tokens[4]).toEqual value: '2', scopes: numericScopes
    expect(tokens[5]).toEqual value: ',', scopes: separatorScopes
    expect(tokens[6]).toEqual value: ' ', scopes: baseScopes
    expect(tokens[7]).toEqual value: '3', scopes: numericScopes
    expect(tokens[8]).toEqual value: ']', scopes: [baseScopes..., 'punctuation.definition.array.end.json']

  it "identifies trailing commas in arrays", ->
    baseScopes = ['source.json', 'meta.structure.array.json']
    numericScopes = [baseScopes..., 'constant.numeric.json']
    # NOTE(review): separatorScopes is defined but unused in this test.
    separatorScopes = [baseScopes..., 'punctuation.separator.array.json']

    # The comma before ']' must be flagged as an illegal trailing separator.
    {tokens} = grammar.tokenizeLine('[1, ]')
    expect(tokens[0]).toEqual value: '[', scopes: [baseScopes..., 'punctuation.definition.array.begin.json']
    expect(tokens[1]).toEqual value: '1', scopes: numericScopes
    expect(tokens[2]).toEqual value: ',', scopes: [baseScopes..., 'invalid.illegal.trailing-array-separator.json']
    expect(tokens[3]).toEqual value: ' ', scopes: baseScopes
    expect(tokens[4]).toEqual value: ']', scopes: [baseScopes..., 'punctuation.definition.array.end.json']

  it "tokenizes objects", ->
    # Scope stacks for keys, values, and the separators between them.
    baseScopes = ['source.json', 'meta.structure.dictionary.json']
    keyScopes = [baseScopes..., 'meta.structure.dictionary.key.json', 'string.quoted.double.json']
    keyBeginScopes = [keyScopes..., 'punctuation.definition.string.begin.json']
    keyEndScopes = [keyScopes..., 'punctuation.definition.string.end.json']
    valueScopes = [baseScopes..., 'meta.structure.dictionary.value.json']
    keyValueSeparatorScopes = [valueScopes..., 'punctuation.separator.dictionary.key-value.json']
    pairSeparatorScopes = [valueScopes..., 'punctuation.separator.dictionary.pair.json']
    stringValueScopes = [valueScopes..., 'string.quoted.double.json']

    # Covers a numeric value, a boolean value, and a string value.
    {tokens} = grammar.tokenizeLine('{"a": 1, "b": true, "foo": "bar"}')
    expect(tokens[0]).toEqual value: '{', scopes: [baseScopes..., 'punctuation.definition.dictionary.begin.json']
    expect(tokens[1]).toEqual value: '"', scopes: keyBeginScopes
    expect(tokens[2]).toEqual value: 'a', scopes: keyScopes
    expect(tokens[3]).toEqual value: '"', scopes: keyEndScopes
    expect(tokens[4]).toEqual value: ':', scopes: keyValueSeparatorScopes
    expect(tokens[5]).toEqual value: ' ', scopes: valueScopes
    expect(tokens[6]).toEqual value: '1', scopes: [valueScopes..., 'constant.numeric.json']
    expect(tokens[7]).toEqual value: ',', scopes: pairSeparatorScopes
    expect(tokens[8]).toEqual value: ' ', scopes: baseScopes
    expect(tokens[9]).toEqual value: '"', scopes: keyBeginScopes
    expect(tokens[10]).toEqual value: 'b', scopes: keyScopes
    expect(tokens[11]).toEqual value: '"', scopes: keyEndScopes
    expect(tokens[12]).toEqual value: ':', scopes: keyValueSeparatorScopes
    expect(tokens[13]).toEqual value: ' ', scopes: valueScopes
    expect(tokens[14]).toEqual value: 'true', scopes: [valueScopes..., 'constant.language.json']
    expect(tokens[15]).toEqual value: ',', scopes: pairSeparatorScopes
    expect(tokens[16]).toEqual value: ' ', scopes: baseScopes
    expect(tokens[17]).toEqual value: '"', scopes: keyBeginScopes
    expect(tokens[18]).toEqual value: 'foo', scopes: keyScopes
    expect(tokens[19]).toEqual value: '"', scopes: keyEndScopes
    expect(tokens[20]).toEqual value: ':', scopes: keyValueSeparatorScopes
    expect(tokens[21]).toEqual value: ' ', scopes: valueScopes
    expect(tokens[22]).toEqual value: '"', scopes: [stringValueScopes..., 'punctuation.definition.string.begin.json']
    expect(tokens[23]).toEqual value: 'bar', scopes: stringValueScopes
    expect(tokens[24]).toEqual value: '"', scopes: [stringValueScopes..., 'punctuation.definition.string.end.json']
    expect(tokens[25]).toEqual value: '}', scopes: [baseScopes..., 'punctuation.definition.dictionary.end.json']

  it "identifies trailing commas in objects", ->
    baseScopes = ['source.json', 'meta.structure.dictionary.json']
    keyScopes = [baseScopes..., 'meta.structure.dictionary.key.json', 'string.quoted.double.json']
    keyBeginScopes = [keyScopes..., 'punctuation.definition.string.begin.json']
    keyEndScopes = [keyScopes..., 'punctuation.definition.string.end.json']
    valueScopes = [baseScopes..., 'meta.structure.dictionary.value.json']
    keyValueSeparatorScopes = [valueScopes..., 'punctuation.separator.dictionary.key-value.json']
    pairSeparatorScopes = [valueScopes..., 'punctuation.separator.dictionary.pair.json']

    # The comma before '}' must be flagged as an illegal trailing separator.
    {tokens} = grammar.tokenizeLine('{"a": 1, "b": 2, }')
    expect(tokens[0]).toEqual value: '{', scopes: [baseScopes..., 'punctuation.definition.dictionary.begin.json']
    expect(tokens[1]).toEqual value: '"', scopes: keyBeginScopes
    expect(tokens[2]).toEqual value: 'a', scopes: keyScopes
    expect(tokens[3]).toEqual value: '"', scopes: keyEndScopes
    expect(tokens[4]).toEqual value: ':', scopes: keyValueSeparatorScopes
    expect(tokens[5]).toEqual value: ' ', scopes: valueScopes
    expect(tokens[6]).toEqual value: '1', scopes: [valueScopes..., 'constant.numeric.json']
    expect(tokens[7]).toEqual value: ',', scopes: pairSeparatorScopes
    expect(tokens[8]).toEqual value: ' ', scopes: baseScopes
    expect(tokens[9]).toEqual value: '"', scopes: keyBeginScopes
    expect(tokens[10]).toEqual value: 'b', scopes: keyScopes
    expect(tokens[11]).toEqual value: '"', scopes: keyEndScopes
    expect(tokens[12]).toEqual value: ':', scopes: keyValueSeparatorScopes
    expect(tokens[13]).toEqual value: ' ', scopes: valueScopes
    expect(tokens[14]).toEqual value: '2', scopes: [valueScopes..., 'constant.numeric.json']
    expect(tokens[15]).toEqual value: ',', scopes: [valueScopes..., 'invalid.illegal.trailing-dictionary-separator.json']
    expect(tokens[16]).toEqual value: ' ', scopes: baseScopes
    expect(tokens[17]).toEqual value: '}', scopes: [baseScopes..., 'punctuation.definition.dictionary.end.json']

View File

@ -0,0 +1,117 @@
// JavaScript port of the legacy TextMate-style tokenization spec for the
// `language-json` package: tokenizes sample JSON lines and asserts each
// token's text together with its full scope stack. (Indentation restored
// to conventional two spaces; the diff rendering had flattened it.)
describe("JSON grammar", () => {
  let grammar = null;

  beforeEach(() => {
    // Force the first-mate (TextMate) grammar instead of tree-sitter.
    atom.config.set('core.useTreeSitterParsers', false);
    waitsForPromise(() => atom.packages.activatePackage('language-json'));
    runs(() => grammar = atom.grammars.grammarForScopeName('source.json'));
  });

  it("parses the grammar", () => {
    expect(grammar).toBeDefined();
    expect(grammar.scopeName).toBe('source.json');
  });

  it("tokenizes arrays", () => {
    // Scope stacks shared by every token inside a JSON array.
    const baseScopes = ['source.json', 'meta.structure.array.json'];
    const numericScopes = [...baseScopes, 'constant.numeric.json'];
    const separatorScopes = [...baseScopes, 'punctuation.separator.array.json'];

    const {tokens} = grammar.tokenizeLine('[1, 2, 3]');
    expect(tokens[0]).toEqual({value: '[', scopes: [...baseScopes, 'punctuation.definition.array.begin.json']});
    expect(tokens[1]).toEqual({value: '1', scopes: numericScopes});
    expect(tokens[2]).toEqual({value: ',', scopes: separatorScopes});
    expect(tokens[3]).toEqual({value: ' ', scopes: baseScopes});
    expect(tokens[4]).toEqual({value: '2', scopes: numericScopes});
    expect(tokens[5]).toEqual({value: ',', scopes: separatorScopes});
    expect(tokens[6]).toEqual({value: ' ', scopes: baseScopes});
    expect(tokens[7]).toEqual({value: '3', scopes: numericScopes});
    expect(tokens[8]).toEqual({value: ']', scopes: [...baseScopes, 'punctuation.definition.array.end.json']});
  });

  it("identifies trailing commas in arrays", () => {
    const baseScopes = ['source.json', 'meta.structure.array.json'];
    const numericScopes = [...baseScopes, 'constant.numeric.json'];
    // NOTE(review): separatorScopes is declared but unused in this test.
    const separatorScopes = [...baseScopes, 'punctuation.separator.array.json'];

    // The comma before ']' must be flagged as an illegal trailing separator.
    const {tokens} = grammar.tokenizeLine('[1, ]');
    expect(tokens[0]).toEqual({value: '[', scopes: [...baseScopes, 'punctuation.definition.array.begin.json']});
    expect(tokens[1]).toEqual({value: '1', scopes: numericScopes});
    expect(tokens[2]).toEqual({value: ',', scopes: [...baseScopes, 'invalid.illegal.trailing-array-separator.json']});
    expect(tokens[3]).toEqual({value: ' ', scopes: baseScopes});
    expect(tokens[4]).toEqual({value: ']', scopes: [...baseScopes, 'punctuation.definition.array.end.json']});
  });

  it("tokenizes objects", () => {
    // Scope stacks for keys, values, and the separators between them.
    const baseScopes = ['source.json', 'meta.structure.dictionary.json'];
    const keyScopes = [...baseScopes, 'meta.structure.dictionary.key.json', 'string.quoted.double.json'];
    const keyBeginScopes = [...keyScopes, 'punctuation.definition.string.begin.json'];
    const keyEndScopes = [...keyScopes, 'punctuation.definition.string.end.json'];
    const valueScopes = [...baseScopes, 'meta.structure.dictionary.value.json'];
    const keyValueSeparatorScopes = [...valueScopes, 'punctuation.separator.dictionary.key-value.json'];
    const pairSeparatorScopes = [...valueScopes, 'punctuation.separator.dictionary.pair.json'];
    const stringValueScopes = [...valueScopes, 'string.quoted.double.json'];

    // Covers a numeric value, a boolean value, and a string value.
    const {tokens} = grammar.tokenizeLine('{"a": 1, "b": true, "foo": "bar"}');
    expect(tokens[0]).toEqual({value: '{', scopes: [...baseScopes, 'punctuation.definition.dictionary.begin.json']});
    expect(tokens[1]).toEqual({value: '"', scopes: keyBeginScopes});
    expect(tokens[2]).toEqual({value: 'a', scopes: keyScopes});
    expect(tokens[3]).toEqual({value: '"', scopes: keyEndScopes});
    expect(tokens[4]).toEqual({value: ':', scopes: keyValueSeparatorScopes});
    expect(tokens[5]).toEqual({value: ' ', scopes: valueScopes});
    expect(tokens[6]).toEqual({value: '1', scopes: [...valueScopes, 'constant.numeric.json']});
    expect(tokens[7]).toEqual({value: ',', scopes: pairSeparatorScopes});
    expect(tokens[8]).toEqual({value: ' ', scopes: baseScopes});
    expect(tokens[9]).toEqual({value: '"', scopes: keyBeginScopes});
    expect(tokens[10]).toEqual({value: 'b', scopes: keyScopes});
    expect(tokens[11]).toEqual({value: '"', scopes: keyEndScopes});
    expect(tokens[12]).toEqual({value: ':', scopes: keyValueSeparatorScopes});
    expect(tokens[13]).toEqual({value: ' ', scopes: valueScopes});
    expect(tokens[14]).toEqual({value: 'true', scopes: [...valueScopes, 'constant.language.json']});
    expect(tokens[15]).toEqual({value: ',', scopes: pairSeparatorScopes});
    expect(tokens[16]).toEqual({value: ' ', scopes: baseScopes});
    expect(tokens[17]).toEqual({value: '"', scopes: keyBeginScopes});
    expect(tokens[18]).toEqual({value: 'foo', scopes: keyScopes});
    expect(tokens[19]).toEqual({value: '"', scopes: keyEndScopes});
    expect(tokens[20]).toEqual({value: ':', scopes: keyValueSeparatorScopes});
    expect(tokens[21]).toEqual({value: ' ', scopes: valueScopes});
    expect(tokens[22]).toEqual({value: '"', scopes: [...stringValueScopes, 'punctuation.definition.string.begin.json']});
    expect(tokens[23]).toEqual({value: 'bar', scopes: stringValueScopes});
    expect(tokens[24]).toEqual({value: '"', scopes: [...stringValueScopes, 'punctuation.definition.string.end.json']});
    expect(tokens[25]).toEqual({value: '}', scopes: [...baseScopes, 'punctuation.definition.dictionary.end.json']});
  });

  it("identifies trailing commas in objects", () => {
    const baseScopes = ['source.json', 'meta.structure.dictionary.json'];
    const keyScopes = [...baseScopes, 'meta.structure.dictionary.key.json', 'string.quoted.double.json'];
    const keyBeginScopes = [...keyScopes, 'punctuation.definition.string.begin.json'];
    const keyEndScopes = [...keyScopes, 'punctuation.definition.string.end.json'];
    const valueScopes = [...baseScopes, 'meta.structure.dictionary.value.json'];
    const keyValueSeparatorScopes = [...valueScopes, 'punctuation.separator.dictionary.key-value.json'];
    const pairSeparatorScopes = [...valueScopes, 'punctuation.separator.dictionary.pair.json'];

    // The comma before '}' must be flagged as an illegal trailing separator.
    const {tokens} = grammar.tokenizeLine('{"a": 1, "b": 2, }');
    expect(tokens[0]).toEqual({value: '{', scopes: [...baseScopes, 'punctuation.definition.dictionary.begin.json']});
    expect(tokens[1]).toEqual({value: '"', scopes: keyBeginScopes});
    expect(tokens[2]).toEqual({value: 'a', scopes: keyScopes});
    expect(tokens[3]).toEqual({value: '"', scopes: keyEndScopes});
    expect(tokens[4]).toEqual({value: ':', scopes: keyValueSeparatorScopes});
    expect(tokens[5]).toEqual({value: ' ', scopes: valueScopes});
    expect(tokens[6]).toEqual({value: '1', scopes: [...valueScopes, 'constant.numeric.json']});
    expect(tokens[7]).toEqual({value: ',', scopes: pairSeparatorScopes});
    expect(tokens[8]).toEqual({value: ' ', scopes: baseScopes});
    expect(tokens[9]).toEqual({value: '"', scopes: keyBeginScopes});
    expect(tokens[10]).toEqual({value: 'b', scopes: keyScopes});
    expect(tokens[11]).toEqual({value: '"', scopes: keyEndScopes});
    expect(tokens[12]).toEqual({value: ':', scopes: keyValueSeparatorScopes});
    expect(tokens[13]).toEqual({value: ' ', scopes: valueScopes});
    expect(tokens[14]).toEqual({value: '2', scopes: [...valueScopes, 'constant.numeric.json']});
    expect(tokens[15]).toEqual({value: ',', scopes: [...valueScopes, 'invalid.illegal.trailing-dictionary-separator.json']});
    expect(tokens[16]).toEqual({value: ' ', scopes: baseScopes});
    expect(tokens[17]).toEqual({value: '}', scopes: [...baseScopes, 'punctuation.definition.dictionary.end.json']});
  });
});

View File

@ -1,387 +0,0 @@
describe "Less grammar", ->
grammar = null
beforeEach ->
waitsForPromise ->
atom.packages.activatePackage("language-css")
waitsForPromise ->
atom.packages.activatePackage("language-less")
runs ->
grammar = atom.grammars.grammarForScopeName("source.css.less")
it "parses the grammar", ->
expect(grammar).toBeDefined()
expect(grammar.scopeName).toBe "source.css.less"
it "parses numbers", ->
{tokens} = grammar.tokenizeLine(" 10")
expect(tokens).toHaveLength 2
expect(tokens[0]).toEqual value: " ", scopes: ['source.css.less']
expect(tokens[1]).toEqual value: "10", scopes: ['source.css.less', 'constant.numeric.css']
{tokens} = grammar.tokenizeLine("-.1")
expect(tokens).toHaveLength 1
expect(tokens[0]).toEqual value: "-.1", scopes: ['source.css.less', 'constant.numeric.css']
{tokens} = grammar.tokenizeLine(".4")
expect(tokens).toHaveLength 1
expect(tokens[0]).toEqual value: ".4", scopes: ['source.css.less', 'constant.numeric.css']
it 'parses color names', ->
{tokens} = grammar.tokenizeLine '.foo { color: rebeccapurple; background: whitesmoke; }'
expect(tokens[8]).toEqual value: "rebeccapurple", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.color.w3c-extended-color-name.css']
expect(tokens[14]).toEqual value: "whitesmoke", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.color.w3c-extended-color-name.css']
it "parses property names", ->
{tokens} = grammar.tokenizeLine("{display: none;}")
expect(tokens[1]).toEqual value: "display", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']
{tokens} = grammar.tokenizeLine("{displaya: none;}")
expect(tokens[1]).toEqual value: "displaya", scopes: ['source.css.less', 'meta.property-list.css']
it "parses property names distinctly from property values with the same text", ->
{tokens} = grammar.tokenizeLine("{left: left;}")
expect(tokens).toHaveLength 7
expect(tokens[1]).toEqual value: "left", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']
expect(tokens[2]).toEqual value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']
expect(tokens[3]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
expect(tokens[4]).toEqual value: "left", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']
expect(tokens[5]).toEqual value: ";", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.terminator.rule.css']
{tokens} = grammar.tokenizeLine("{left:left;}")
expect(tokens).toHaveLength 6
expect(tokens[1]).toEqual value: "left", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']
expect(tokens[2]).toEqual value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']
expect(tokens[3]).toEqual value: "left", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']
expect(tokens[4]).toEqual value: ";", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.terminator.rule.css']
it "parses property names distinctly from element selectors with the same prefix", ->
{tokens} = grammar.tokenizeLine("{table-layout: fixed;}")
expect(tokens).toHaveLength 7
expect(tokens[1]).toEqual value: "table-layout", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']
expect(tokens[2]).toEqual value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']
expect(tokens[3]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
expect(tokens[4]).toEqual value: "fixed", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']
expect(tokens[5]).toEqual value: ";", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.terminator.rule.css']
it "does not parse @media conditions as a property-list", ->
{tokens} = grammar.tokenizeLine('@media (min-resolution: 2dppx) {}')
expect(tokens[4].scopes).not.toContain 'support.type.property-name.css'
expect(tokens[7].scopes).not.toContain 'meta.property-value.css'
expect(tokens[11].scopes).not.toContain 'meta.property-value.css'
it "parses @media features", ->
{tokens} = grammar.tokenizeLine('@media (min-width: 100px) {}')
expect(tokens[0]).toEqual value: "@", scopes: ['source.css.less', 'meta.at-rule.media.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css']
expect(tokens[1]).toEqual value: "media", scopes: ['source.css.less', 'meta.at-rule.media.css', 'keyword.control.at-rule.media.css']
expect(tokens[4]).toEqual value: "min-width", scopes: ['source.css.less', 'support.type.property-name.media.css']
expect(tokens[7]).toEqual value: "100", scopes: ['source.css.less', 'constant.numeric.css']
expect(tokens[8]).toEqual value: "px", scopes: ['source.css.less', 'constant.numeric.css', 'keyword.other.unit.px.css']
it "parses @media orientation", ->
{tokens} = grammar.tokenizeLine('@media (orientation: portrait){}')
expect(tokens[4]).toEqual value: "orientation", scopes: ['source.css.less', 'support.type.property-name.media.css']
expect(tokens[7]).toEqual value: "portrait", scopes: ['source.css.less', 'support.constant.property-value.media-property.media.css']
it "parses parent selector", ->
{tokens} = grammar.tokenizeLine('& .foo {}')
expect(tokens[0]).toEqual value: "&", scopes: ['source.css.less', 'entity.other.attribute-name.parent-selector.css', 'punctuation.definition.entity.css']
expect(tokens[1]).toEqual value: " ", scopes: ['source.css.less']
expect(tokens[2]).toEqual value: ".", scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']
expect(tokens[3]).toEqual value: "foo", scopes: ['source.css.less', 'entity.other.attribute-name.class.css']
expect(tokens[4]).toEqual value: " ", scopes: ['source.css.less']
expect(tokens[5]).toEqual value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
expect(tokens[6]).toEqual value: "}", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']
{tokens} = grammar.tokenizeLine('&:hover {}')
expect(tokens[0]).toEqual value: "&", scopes: ['source.css.less', 'entity.other.attribute-name.parent-selector.css', 'punctuation.definition.entity.css']
expect(tokens[1]).toEqual value: ":", scopes: ['source.css.less', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']
expect(tokens[2]).toEqual value: "hover", scopes: ['source.css.less', 'entity.other.attribute-name.pseudo-class.css']
expect(tokens[3]).toEqual value: " ", scopes: ['source.css.less']
expect(tokens[4]).toEqual value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
expect(tokens[5]).toEqual value: "}", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']
it "parses pseudo element", ->
{tokens} = grammar.tokenizeLine('.foo::after {}')
expect(tokens[0]).toEqual value: ".", scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']
expect(tokens[1]).toEqual value: "foo", scopes: ['source.css.less', 'entity.other.attribute-name.class.css']
expect(tokens[2]).toEqual value: "::", scopes: ['source.css.less', 'entity.other.attribute-name.pseudo-element.css', 'punctuation.definition.entity.css']
expect(tokens[3]).toEqual value: "after", scopes: ['source.css.less', 'entity.other.attribute-name.pseudo-element.css']
expect(tokens[4]).toEqual value: " ", scopes: ['source.css.less']
expect(tokens[5]).toEqual value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
expect(tokens[6]).toEqual value: "}", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']
it "parses id selectors", ->
{tokens} = grammar.tokenizeLine("#abc {}")
expect(tokens[0]).toEqual value: "#", scopes: ['source.css.less', 'meta.selector.css', 'entity.other.attribute-name.id', 'punctuation.definition.entity.css']
expect(tokens[1]).toEqual value: "abc", scopes: ['source.css.less', 'meta.selector.css', 'entity.other.attribute-name.id']
{tokens} = grammar.tokenizeLine("#abc-123 {}")
expect(tokens[0]).toEqual value: "#", scopes: ['source.css.less', 'meta.selector.css', 'entity.other.attribute-name.id', 'punctuation.definition.entity.css']
expect(tokens[1]).toEqual value: "abc-123", scopes: ['source.css.less', 'meta.selector.css', 'entity.other.attribute-name.id']
it "parses custom selectors", ->
{tokens} = grammar.tokenizeLine("abc-123-xyz {}")
expect(tokens[0]).toEqual value: "abc-123-xyz", scopes: ['source.css.less', 'entity.name.tag.custom.css']
it "parses pseudo classes", ->
{tokens} = grammar.tokenizeLine(".foo:hover { span:last-of-type { font-weight: bold; } }")
expect(tokens[0]).toEqual value: ".", scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']
expect(tokens[1]).toEqual value: "foo", scopes: ['source.css.less', 'entity.other.attribute-name.class.css']
expect(tokens[2]).toEqual value: ":", scopes: ['source.css.less', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']
expect(tokens[3]).toEqual value: "hover", scopes: ['source.css.less', 'entity.other.attribute-name.pseudo-class.css']
expect(tokens[4]).toEqual value: " ", scopes: ['source.css.less']
expect(tokens[5]).toEqual value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
expect(tokens[6]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
expect(tokens[7]).toEqual value: "span", scopes: ['source.css.less', 'meta.property-list.css', 'entity.name.tag.css']
expect(tokens[8]).toEqual value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']
expect(tokens[9]).toEqual value: "last-of-type", scopes: ['source.css.less', 'meta.property-list.css', 'entity.other.attribute-name.pseudo-class.css']
expect(tokens[10]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
expect(tokens[11]).toEqual value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
expect(tokens[12]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-list.css']
expect(tokens[13]).toEqual value: "font-weight", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-list.css', 'support.type.property-name.css']
expect(tokens[14]).toEqual value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-list.css', 'punctuation.separator.key-value.css']
it 'parses nested multiple lines with pseudo-classes', ->
lines = grammar.tokenizeLines '''
a { p:hover,
p:active { color: blue; } }
'''
expect(lines[0][0]).toEqual value: 'a', scopes: ['source.css.less', 'entity.name.tag.css']
expect(lines[0][1]).toEqual value: ' ', scopes: ['source.css.less']
expect(lines[0][2]).toEqual value: '{', scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
expect(lines[0][3]).toEqual value: ' ', scopes: ['source.css.less', 'meta.property-list.css']
expect(lines[0][4]).toEqual value: 'p', scopes: ['source.css.less', 'meta.property-list.css', 'entity.name.tag.css']
expect(lines[0][5]).toEqual value: ':', scopes: ['source.css.less', 'meta.property-list.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']
expect(lines[0][6]).toEqual value: 'hover', scopes: ['source.css.less', 'meta.property-list.css', 'entity.other.attribute-name.pseudo-class.css']
expect(lines[0][7]).toEqual value: ',', scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.list.comma.css']
expect(lines[1][0]).toEqual value: 'p', scopes: ['source.css.less', 'meta.property-list.css', 'entity.name.tag.css']
expect(lines[1][1]).toEqual value: ':', scopes: ['source.css.less', 'meta.property-list.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']
expect(lines[1][2]).toEqual value: 'active', scopes: ['source.css.less', 'meta.property-list.css', 'entity.other.attribute-name.pseudo-class.css']
expect(lines[1][3]).toEqual value: ' ', scopes: ['source.css.less', 'meta.property-list.css']
expect(lines[1][4]).toEqual value: '{', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
expect(lines[1][5]).toEqual value: ' ', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-list.css']
it "parses property lists", ->
{tokens} = grammar.tokenizeLine(".foo { display: table-row; }")
expect(tokens).toHaveLength 12
expect(tokens[0]).toEqual value: ".", scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']
expect(tokens[1]).toEqual value: "foo", scopes: ['source.css.less', 'entity.other.attribute-name.class.css']
expect(tokens[2]).toEqual value: " ", scopes: ['source.css.less']
expect(tokens[3]).toEqual value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
expect(tokens[4]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
expect(tokens[5]).toEqual value: "display", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']
expect(tokens[6]).toEqual value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']
expect(tokens[7]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
expect(tokens[8]).toEqual value: "table-row", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']
expect(tokens[9]).toEqual value: ";", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.terminator.rule.css']
expect(tokens[10]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
expect(tokens[11]).toEqual value: "}", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']
it 'parses font lists', ->
{tokens} = grammar.tokenizeLine '.foo { font-family: "Some Font Name", serif; }'
expect(tokens[5]).toEqual value: 'font-family', scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']
expect(tokens[9]).toEqual value: 'Some Font Name', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css']
expect(tokens[13]).toEqual value: 'serif', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.font-name.css']
it 'parses an incomplete property list', ->
{tokens} = grammar.tokenizeLine '.foo { border: none}'
expect(tokens[5]).toEqual value: 'border', scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']
expect(tokens[8]).toEqual value: 'none', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']
expect(tokens[9]).toEqual value: '}', scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']
it 'parses multiple lines of an incomplete property-list', ->
lines = grammar.tokenizeLines '''
very-custom { color: inherit }
another-one { display: none; }
'''
expect(lines[0][0]).toEqual value: 'very-custom', scopes: ['source.css.less', 'entity.name.tag.custom.css']
expect(lines[0][4]).toEqual value: 'color', scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']
expect(lines[0][7]).toEqual value: 'inherit', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']
expect(lines[0][9]).toEqual value: '}', scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']
expect(lines[1][0]).toEqual value: 'another-one', scopes: ['source.css.less', 'entity.name.tag.custom.css']
expect(lines[1][10]).toEqual value: '}', scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']
  # Less variables: the `@` sigil and the variable name get separate tokens,
  # both under variable.other.less.
  it "parses variables", ->
    {tokens} = grammar.tokenizeLine(".foo { border: @bar; }")
    expect(tokens[0]).toEqual value: ".", scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']
    expect(tokens[1]).toEqual value: "foo", scopes: ['source.css.less', 'entity.other.attribute-name.class.css']
    expect(tokens[2]).toEqual value: " ", scopes: ['source.css.less']
    expect(tokens[3]).toEqual value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
    expect(tokens[4]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
    expect(tokens[5]).toEqual value: "border", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']
    expect(tokens[6]).toEqual value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']
    expect(tokens[7]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
    expect(tokens[8]).toEqual value: "@", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'variable.other.less', 'punctuation.definition.variable.less']
    expect(tokens[9]).toEqual value: "bar", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'variable.other.less']
    expect(tokens[10]).toEqual value: ";", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.terminator.rule.css']
    expect(tokens[11]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
    expect(tokens[12]).toEqual value: "}", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']

  # CSS custom properties (`--name`) are also scoped as variable.other.less
  # here, with the `--` prefix as a separate punctuation token; note the value
  # side still gets normal numeric/unit scopes.
  it "parses css variables", ->
    {tokens} = grammar.tokenizeLine(".foo { --spacing-unit: 6px; }")
    expect(tokens[0]).toEqual value: ".", scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']
    expect(tokens[1]).toEqual value: "foo", scopes: ['source.css.less', 'entity.other.attribute-name.class.css']
    expect(tokens[2]).toEqual value: " ", scopes: ['source.css.less']
    expect(tokens[3]).toEqual value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
    expect(tokens[4]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
    expect(tokens[5]).toEqual value: "--", scopes: ['source.css.less', 'meta.property-list.css', 'variable.other.less', 'punctuation.definition.variable.less']
    expect(tokens[6]).toEqual value: "spacing-unit", scopes: ['source.css.less', 'meta.property-list.css', 'variable.other.less']
    expect(tokens[7]).toEqual value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']
    expect(tokens[8]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
    expect(tokens[9]).toEqual value: "6", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css']
    expect(tokens[10]).toEqual value: "px", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css']
    expect(tokens[11]).toEqual value: ";", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.terminator.rule.css']
    expect(tokens[12]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
    expect(tokens[13]).toEqual value: "}", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']
  # `@{name}` interpolation inside a class selector: the whole `@{selector}`
  # chunk is one variable.other.interpolation.less token nested in the class
  # attribute-name scope.
  it 'parses variable interpolation in selectors', ->
    {tokens} = grammar.tokenizeLine '.@{selector} { color: #0ee; }'
    expect(tokens[0]).toEqual value: '.', scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']
    expect(tokens[1]).toEqual value: '@{selector}', scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'variable.other.interpolation.less']
    expect(tokens[2]).toEqual value: " ", scopes: ['source.css.less']
    expect(tokens[3]).toEqual value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
    expect(tokens[4]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']

  # Interpolation on the property-name side of a declaration: `@{property}`
  # replaces the usual support.type.property-name.css token and the `:`
  # separator is still recognized after it.
  it 'parses variable interpolation in properties', ->
    {tokens} = grammar.tokenizeLine '.foo { @{property}: #0ee; }'
    expect(tokens[0]).toEqual value: ".", scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']
    expect(tokens[1]).toEqual value: "foo", scopes: ['source.css.less', 'entity.other.attribute-name.class.css']
    expect(tokens[2]).toEqual value: " ", scopes: ['source.css.less']
    expect(tokens[3]).toEqual value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
    expect(tokens[4]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
    expect(tokens[5]).toEqual value: '@{property}', scopes: ['source.css.less', 'meta.property-list.css', 'variable.other.interpolation.less']
    expect(tokens[6]).toEqual value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']
    expect(tokens[7]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
it 'parses variable interpolation in urls', ->
{tokens} = grammar.tokenizeLine '.foo { background: #F0F0F0 url("@{var}/img.png"); }";'
expect(tokens[8]).toEqual value: "#F0F0F0", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.rgb-value.css']
expect(tokens[10]).toEqual value: "url", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css']
expect(tokens[11]).toEqual value: "(", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'meta.brace.round.css']
expect(tokens[13]).toEqual value: "@{var}", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'string.quoted.double.css', 'variable.other.interpolation.less']
expect(tokens[14]).toEqual value: "/img.png", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'string.quoted.double.css']
  # Interpolation inside an @import path: the `@import` keyword is split into
  # sigil + word, and `@{var}` inside the quoted path keeps the string scope.
  it 'parses variable interpolation in imports', ->
    {tokens} = grammar.tokenizeLine '@import "@{var}/tidal-wave.less";'
    expect(tokens[0]).toEqual value: "@", scopes: ['source.css.less', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.less', 'punctuation.definition.keyword.less']
    expect(tokens[1]).toEqual value: "import", scopes: ['source.css.less', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.less']
    expect(tokens[2]).toEqual value: " ", scopes: ['source.css.less', 'meta.at-rule.import.css']
    expect(tokens[3]).toEqual value: "\"", scopes: ['source.css.less', 'meta.at-rule.import.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css']
    expect(tokens[4]).toEqual value: "@{var}", scopes: ['source.css.less', 'meta.at-rule.import.css', 'string.quoted.double.css', 'variable.other.interpolation.less']

  # Less import options `(optional, reference)` before the path each get
  # keyword.control.import.option.less; the comma is a list separator.
  it 'parses options in import statements', ->
    {tokens} = grammar.tokenizeLine '@import (optional, reference) "theme";'
    expect(tokens[0]).toEqual value: "@", scopes: ['source.css.less', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.less', 'punctuation.definition.keyword.less']
    expect(tokens[1]).toEqual value: "import", scopes: ['source.css.less', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.less']
    expect(tokens[4]).toEqual value: "optional", scopes: ['source.css.less', 'meta.at-rule.import.css', 'keyword.control.import.option.less']
    expect(tokens[5]).toEqual value: ",", scopes: ['source.css.less', 'meta.at-rule.import.css', 'punctuation.separator.list.comma.css']
    expect(tokens[7]).toEqual value: "reference", scopes: ['source.css.less', 'meta.at-rule.import.css', 'keyword.control.import.option.less']
    expect(tokens[11]).toEqual value: "theme", scopes: ['source.css.less', 'meta.at-rule.import.css', 'string.quoted.double.css']
  # Built-in CSS functions inside a value: rgba() gets meta.function.color.css
  # with its own bracket and comma punctuation scopes.
  it 'parses built-in functions in property values', ->
    {tokens} = grammar.tokenizeLine '.foo { border: 1px solid rgba(0,0,0); }'
    expect(tokens[0]).toEqual value: ".", scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']
    expect(tokens[1]).toEqual value: "foo", scopes: ['source.css.less', 'entity.other.attribute-name.class.css']
    expect(tokens[2]).toEqual value: " ", scopes: ['source.css.less']
    expect(tokens[3]).toEqual value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
    expect(tokens[4]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
    expect(tokens[5]).toEqual value: "border", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']
    expect(tokens[6]).toEqual value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']
    expect(tokens[8]).toEqual value: "1", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css']
    expect(tokens[9]).toEqual value: "px", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css']
    expect(tokens[11]).toEqual value: "solid", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']
    expect(tokens[13]).toEqual value: "rgba", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'support.function.misc.css']
    expect(tokens[14]).toEqual value: "(", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.section.function.begin.bracket.round.css']
    expect(tokens[15]).toEqual value: "0", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css']
    expect(tokens[16]).toEqual value: ",", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css']
    expect(tokens[17]).toEqual value: "0", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css']
    expect(tokens[18]).toEqual value: ",", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css']
    expect(tokens[21]).toEqual value: ";", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.terminator.rule.css']

  # Gradient functions get their own meta.function.gradient.css scope, distinct
  # from the color-function scope asserted above.
  it 'parses linear-gradient', ->
    {tokens} = grammar.tokenizeLine '.foo { background: linear-gradient(white, black); }'
    expect(tokens[5]).toEqual value: "background", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']
    expect(tokens[6]).toEqual value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']
    expect(tokens[7]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
    expect(tokens[8]).toEqual value: "linear-gradient", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.function.gradient.css']
    expect(tokens[9]).toEqual value: "(", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.section.function.begin.bracket.round.css']

  # Transform functions (scaleY) are support.function.transform.css; note the
  # opening paren here has no enclosing meta.function scope.
  it 'parses transform functions', ->
    {tokens} = grammar.tokenizeLine '.foo { transform: scaleY(1); }'
    expect(tokens[5]).toEqual value: "transform", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']
    expect(tokens[6]).toEqual value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']
    expect(tokens[7]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
    expect(tokens[8]).toEqual value: "scaleY", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.transform.css']
    expect(tokens[9]).toEqual value: "(", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'punctuation.section.function.begin.bracket.round.css']
  # Blend-mode keywords are plain property-value constants, not functions.
  it 'parses blend modes', ->
    {tokens} = grammar.tokenizeLine '.foo { background-blend-mode: color-dodge; }'
    expect(tokens[5]).toEqual value: "background-blend-mode", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']
    expect(tokens[6]).toEqual value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']
    expect(tokens[7]).toEqual value: " ", scopes: ['source.css.less', 'meta.property-list.css']
    expect(tokens[8]).toEqual value: "color-dodge", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']
    expect(tokens[9]).toEqual value: ";", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.terminator.rule.css']

  # Unquoted url() arguments become a single string.url.css token, even with
  # percent-escapes in the authority part.
  it 'parses non-quoted urls', ->
    {tokens} = grammar.tokenizeLine '.foo { background: url(http://%20/2.png) }'
    expect(tokens[8]).toEqual value: "url", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css']
    expect(tokens[9]).toEqual value: "(", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'meta.brace.round.css']
    expect(tokens[10]).toEqual value: "http://%20/2.png", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'string.url.css']

  # Relative paths (leading ../) are treated the same as absolute URLs.
  it 'parses non-quoted relative urls', ->
    {tokens} = grammar.tokenizeLine '.foo { background: url(../path/to/image.png) }'
    expect(tokens[8]).toEqual value: "url", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css']
    expect(tokens[9]).toEqual value: "(", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'meta.brace.round.css']
    expect(tokens[10]).toEqual value: "../path/to/image.png", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'string.url.css']

  # After url(...) closes, a following format(...) call must be recognized as a
  # Less builtin (note the .less suffix on its scope, unlike url's .css).
  it 'parses non-quoted urls followed by a format', ->
    {tokens} = grammar.tokenizeLine '@font-face { src: url(http://example.com/font.woff) format("woff"); }'
    expect(tokens[8]).toEqual value: 'url', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css']
    expect(tokens[9]).toEqual value: "(", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'meta.brace.round.css']
    expect(tokens[10]).toEqual value: "http://example.com/font.woff", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'string.url.css']
    expect(tokens[11]).toEqual value: ")", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'meta.brace.round.css']
    expect(tokens[13]).toEqual value: "format", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.less']
  # `true` assigned to a Less variable is a boolean constant, not a plain word.
  it 'parses the "true" value', ->
    {tokens} = grammar.tokenizeLine '@var: true;'
    expect(tokens[4]).toEqual value: "true", scopes: ['source.css.less', 'constant.language.boolean.less']

  # Mixin guards: `when`/`and`/`not` are logical-operator keywords; type-check
  # (isnumber), unit-check (ispixel) and default() each get distinct function
  # scopes, and comparison operators get keyword.operator.less.
  it 'parses mixin guards', ->
    {tokens} = grammar.tokenizeLine '.mixin() when (isnumber(@b)) and (default()), (ispixel(@a)) and not (@a < 0) { }'
    expect(tokens[4]).toEqual value: "when", scopes: ['source.css.less', 'keyword.control.logical.operator.less']
    expect(tokens[7]).toEqual value: "isnumber", scopes: ['source.css.less', 'support.function.type-checking.less']
    expect(tokens[14]).toEqual value: "and", scopes: ['source.css.less', 'keyword.control.logical.operator.less']
    expect(tokens[17]).toEqual value: "default", scopes: ['source.css.less', 'support.function.default.less']
    expect(tokens[21]).toEqual value: ",", scopes: ['source.css.less', 'punctuation.separator.list.comma.css']
    expect(tokens[24]).toEqual value: "ispixel", scopes: ['source.css.less', 'support.function.unit-checking.less']
    expect(tokens[31]).toEqual value: "and", scopes: ['source.css.less', 'keyword.control.logical.operator.less']
    expect(tokens[33]).toEqual value: "not", scopes: ['source.css.less', 'keyword.control.logical.operator.less']
    expect(tokens[39]).toEqual value: "<", scopes: ['source.css.less', 'keyword.operator.less']
  # String tokenization inside property values: open/close quote punctuation
  # plus the inner text, for both quote styles, and escape sequences.
  describe 'strings', ->
    it 'tokenizes single-quote strings', ->
      {tokens} = grammar.tokenizeLine ".a { content: 'hi' }"
      expect(tokens[8]).toEqual value: "'", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css', 'punctuation.definition.string.begin.css']
      expect(tokens[9]).toEqual value: 'hi', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css']
      expect(tokens[10]).toEqual value: "'", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css', 'punctuation.definition.string.end.css']
    it 'tokenizes double-quote strings', ->
      {tokens} = grammar.tokenizeLine '.a { content: "hi" }'
      expect(tokens[8]).toEqual value: '"', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css']
      expect(tokens[9]).toEqual value: 'hi', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css']
      expect(tokens[10]).toEqual value: '"', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css']
    # The backslash escape spans the whole `\abcdef` run as one
    # constant.character.escape.css token, in either quote style.
    it 'tokenizes escape characters', ->
      {tokens} = grammar.tokenizeLine ".a { content: '\\abcdef' }"
      expect(tokens[9]).toEqual value: '\\abcdef', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css', 'constant.character.escape.css']
      {tokens} = grammar.tokenizeLine '.a { content: "\\abcdef" }'
      expect(tokens[9]).toEqual value: '\\abcdef', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'constant.character.escape.css']

View File

@ -0,0 +1,428 @@
describe("Less grammar", () => {
let grammar = null;
beforeEach(() => {
waitsForPromise(() => atom.packages.activatePackage("language-css"));
waitsForPromise(() => atom.packages.activatePackage("language-less"));
runs(() => grammar = atom.grammars.grammarForScopeName("source.css.less"));
});
it("parses the grammar", () => {
expect(grammar).toBeDefined();
expect(grammar.scopeName).toBe("source.css.less");
});
it("parses numbers", () => {
let {tokens} = grammar.tokenizeLine(" 10");
expect(tokens).toHaveLength(2);
expect(tokens[0]).toEqual({value: " ", scopes: ['source.css.less']});
expect(tokens[1]).toEqual({value: "10", scopes: ['source.css.less', 'constant.numeric.css']});
({tokens} = grammar.tokenizeLine("-.1"));
expect(tokens).toHaveLength(1);
expect(tokens[0]).toEqual({value: "-.1", scopes: ['source.css.less', 'constant.numeric.css']});
({tokens} = grammar.tokenizeLine(".4"));
expect(tokens).toHaveLength(1);
expect(tokens[0]).toEqual({value: ".4", scopes: ['source.css.less', 'constant.numeric.css']});
});
it('parses color names', () => {
const {tokens} = grammar.tokenizeLine('.foo { color: rebeccapurple; background: whitesmoke; }');
expect(tokens[8]).toEqual({value: "rebeccapurple", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.color.w3c-extended-color-name.css']});
expect(tokens[14]).toEqual({value: "whitesmoke", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.color.w3c-extended-color-name.css']});
});
it("parses property names", () => {
let {tokens} = grammar.tokenizeLine("{display: none;}");
expect(tokens[1]).toEqual({value: "display", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']});
({tokens} = grammar.tokenizeLine("{displaya: none;}"));
expect(tokens[1]).toEqual({value: "displaya", scopes: ['source.css.less', 'meta.property-list.css']});
});
it("parses property names distinctly from property values with the same text", () => {
let {tokens} = grammar.tokenizeLine("{left: left;}");
expect(tokens).toHaveLength(7);
expect(tokens[1]).toEqual({value: "left", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']});
expect(tokens[2]).toEqual({value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']});
expect(tokens[3]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[4]).toEqual({value: "left", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']});
expect(tokens[5]).toEqual({value: ";", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.terminator.rule.css']});
({tokens} = grammar.tokenizeLine("{left:left;}"));
expect(tokens).toHaveLength(6);
expect(tokens[1]).toEqual({value: "left", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']});
expect(tokens[2]).toEqual({value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']});
expect(tokens[3]).toEqual({value: "left", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']});
expect(tokens[4]).toEqual({value: ";", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.terminator.rule.css']});
});
it("parses property names distinctly from element selectors with the same prefix", () => {
const {tokens} = grammar.tokenizeLine("{table-layout: fixed;}");
expect(tokens).toHaveLength(7);
expect(tokens[1]).toEqual({value: "table-layout", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']});
expect(tokens[2]).toEqual({value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']});
expect(tokens[3]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[4]).toEqual({value: "fixed", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']});
expect(tokens[5]).toEqual({value: ";", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.terminator.rule.css']});
});
it("does not parse @media conditions as a property-list", () => {
const {tokens} = grammar.tokenizeLine('@media (min-resolution: 2dppx) {}');
expect(tokens[4].scopes).not.toContain('support.type.property-name.css');
expect(tokens[7].scopes).not.toContain('meta.property-value.css');
expect(tokens[11].scopes).not.toContain('meta.property-value.css');
});
it("parses @media features", () => {
const {tokens} = grammar.tokenizeLine('@media (min-width: 100px) {}');
expect(tokens[0]).toEqual({value: "@", scopes: ['source.css.less', 'meta.at-rule.media.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css']});
expect(tokens[1]).toEqual({value: "media", scopes: ['source.css.less', 'meta.at-rule.media.css', 'keyword.control.at-rule.media.css']});
expect(tokens[4]).toEqual({value: "min-width", scopes: ['source.css.less', 'support.type.property-name.media.css']});
expect(tokens[7]).toEqual({value: "100", scopes: ['source.css.less', 'constant.numeric.css']});
expect(tokens[8]).toEqual({value: "px", scopes: ['source.css.less', 'constant.numeric.css', 'keyword.other.unit.px.css']});
});
it("parses @media orientation", () => {
const {tokens} = grammar.tokenizeLine('@media (orientation: portrait){}');
expect(tokens[4]).toEqual({value: "orientation", scopes: ['source.css.less', 'support.type.property-name.media.css']});
expect(tokens[7]).toEqual({value: "portrait", scopes: ['source.css.less', 'support.constant.property-value.media-property.media.css']});
});
it("parses parent selector", () => {
let {tokens} = grammar.tokenizeLine('& .foo {}');
expect(tokens[0]).toEqual({value: "&", scopes: ['source.css.less', 'entity.other.attribute-name.parent-selector.css', 'punctuation.definition.entity.css']});
expect(tokens[1]).toEqual({value: " ", scopes: ['source.css.less']});
expect(tokens[2]).toEqual({value: ".", scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']});
expect(tokens[3]).toEqual({value: "foo", scopes: ['source.css.less', 'entity.other.attribute-name.class.css']});
expect(tokens[4]).toEqual({value: " ", scopes: ['source.css.less']});
expect(tokens[5]).toEqual({value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']});
expect(tokens[6]).toEqual({value: "}", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']});
({tokens} = grammar.tokenizeLine('&:hover {}'));
expect(tokens[0]).toEqual({value: "&", scopes: ['source.css.less', 'entity.other.attribute-name.parent-selector.css', 'punctuation.definition.entity.css']});
expect(tokens[1]).toEqual({value: ":", scopes: ['source.css.less', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']});
expect(tokens[2]).toEqual({value: "hover", scopes: ['source.css.less', 'entity.other.attribute-name.pseudo-class.css']});
expect(tokens[3]).toEqual({value: " ", scopes: ['source.css.less']});
expect(tokens[4]).toEqual({value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']});
expect(tokens[5]).toEqual({value: "}", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']});
});
it("parses pseudo element", () => {
const {tokens} = grammar.tokenizeLine('.foo::after {}');
expect(tokens[0]).toEqual({value: ".", scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']});
expect(tokens[1]).toEqual({value: "foo", scopes: ['source.css.less', 'entity.other.attribute-name.class.css']});
expect(tokens[2]).toEqual({value: "::", scopes: ['source.css.less', 'entity.other.attribute-name.pseudo-element.css', 'punctuation.definition.entity.css']});
expect(tokens[3]).toEqual({value: "after", scopes: ['source.css.less', 'entity.other.attribute-name.pseudo-element.css']});
expect(tokens[4]).toEqual({value: " ", scopes: ['source.css.less']});
expect(tokens[5]).toEqual({value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']});
expect(tokens[6]).toEqual({value: "}", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']});
});
it("parses id selectors", () => {
let {tokens} = grammar.tokenizeLine("#abc {}");
expect(tokens[0]).toEqual({value: "#", scopes: ['source.css.less', 'meta.selector.css', 'entity.other.attribute-name.id', 'punctuation.definition.entity.css']});
expect(tokens[1]).toEqual({value: "abc", scopes: ['source.css.less', 'meta.selector.css', 'entity.other.attribute-name.id']});
({tokens} = grammar.tokenizeLine("#abc-123 {}"));
expect(tokens[0]).toEqual({value: "#", scopes: ['source.css.less', 'meta.selector.css', 'entity.other.attribute-name.id', 'punctuation.definition.entity.css']});
expect(tokens[1]).toEqual({value: "abc-123", scopes: ['source.css.less', 'meta.selector.css', 'entity.other.attribute-name.id']});
});
it("parses custom selectors", () => {
const {tokens} = grammar.tokenizeLine("abc-123-xyz {}");
expect(tokens[0]).toEqual({value: "abc-123-xyz", scopes: ['source.css.less', 'entity.name.tag.custom.css']});
});
it("parses pseudo classes", () => {
const {tokens} = grammar.tokenizeLine(".foo:hover { span:last-of-type { font-weight: bold; } }");
expect(tokens[0]).toEqual({value: ".", scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']});
expect(tokens[1]).toEqual({value: "foo", scopes: ['source.css.less', 'entity.other.attribute-name.class.css']});
expect(tokens[2]).toEqual({value: ":", scopes: ['source.css.less', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']});
expect(tokens[3]).toEqual({value: "hover", scopes: ['source.css.less', 'entity.other.attribute-name.pseudo-class.css']});
expect(tokens[4]).toEqual({value: " ", scopes: ['source.css.less']});
expect(tokens[5]).toEqual({value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']});
expect(tokens[6]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[7]).toEqual({value: "span", scopes: ['source.css.less', 'meta.property-list.css', 'entity.name.tag.css']});
expect(tokens[8]).toEqual({value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']});
expect(tokens[9]).toEqual({value: "last-of-type", scopes: ['source.css.less', 'meta.property-list.css', 'entity.other.attribute-name.pseudo-class.css']});
expect(tokens[10]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[11]).toEqual({value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']});
expect(tokens[12]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-list.css']});
expect(tokens[13]).toEqual({value: "font-weight", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-list.css', 'support.type.property-name.css']});
expect(tokens[14]).toEqual({value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-list.css', 'punctuation.separator.key-value.css']});
});
it('parses nested multiple lines with pseudo-classes', () => {
const lines = grammar.tokenizeLines(`\
a { p:hover,
p:active { color: blue; } }\
`
);
expect(lines[0][0]).toEqual({value: 'a', scopes: ['source.css.less', 'entity.name.tag.css']});
expect(lines[0][1]).toEqual({value: ' ', scopes: ['source.css.less']});
expect(lines[0][2]).toEqual({value: '{', scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']});
expect(lines[0][3]).toEqual({value: ' ', scopes: ['source.css.less', 'meta.property-list.css']});
expect(lines[0][4]).toEqual({value: 'p', scopes: ['source.css.less', 'meta.property-list.css', 'entity.name.tag.css']});
expect(lines[0][5]).toEqual({value: ':', scopes: ['source.css.less', 'meta.property-list.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']});
expect(lines[0][6]).toEqual({value: 'hover', scopes: ['source.css.less', 'meta.property-list.css', 'entity.other.attribute-name.pseudo-class.css']});
expect(lines[0][7]).toEqual({value: ',', scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.list.comma.css']});
expect(lines[1][0]).toEqual({value: 'p', scopes: ['source.css.less', 'meta.property-list.css', 'entity.name.tag.css']});
expect(lines[1][1]).toEqual({value: ':', scopes: ['source.css.less', 'meta.property-list.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']});
expect(lines[1][2]).toEqual({value: 'active', scopes: ['source.css.less', 'meta.property-list.css', 'entity.other.attribute-name.pseudo-class.css']});
expect(lines[1][3]).toEqual({value: ' ', scopes: ['source.css.less', 'meta.property-list.css']});
expect(lines[1][4]).toEqual({value: '{', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']});
expect(lines[1][5]).toEqual({value: ' ', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-list.css']});
});
it("parses property lists", () => {
const {tokens} = grammar.tokenizeLine(".foo { display: table-row; }");
expect(tokens).toHaveLength(12);
expect(tokens[0]).toEqual({value: ".", scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']});
expect(tokens[1]).toEqual({value: "foo", scopes: ['source.css.less', 'entity.other.attribute-name.class.css']});
expect(tokens[2]).toEqual({value: " ", scopes: ['source.css.less']});
expect(tokens[3]).toEqual({value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']});
expect(tokens[4]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[5]).toEqual({value: "display", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']});
expect(tokens[6]).toEqual({value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']});
expect(tokens[7]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[8]).toEqual({value: "table-row", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']});
expect(tokens[9]).toEqual({value: ";", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.terminator.rule.css']});
expect(tokens[10]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[11]).toEqual({value: "}", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']});
});
it('parses font lists', () => {
const {tokens} = grammar.tokenizeLine('.foo { font-family: "Some Font Name", serif; }');
expect(tokens[5]).toEqual({value: 'font-family', scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']});
expect(tokens[9]).toEqual({value: 'Some Font Name', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css']});
expect(tokens[13]).toEqual({value: 'serif', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.font-name.css']});
});
it('parses an incomplete property list', () => {
const {tokens} = grammar.tokenizeLine('.foo { border: none}');
expect(tokens[5]).toEqual({value: 'border', scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']});
expect(tokens[8]).toEqual({value: 'none', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']});
expect(tokens[9]).toEqual({value: '}', scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']});
});
// Two rules on consecutive lines, the first without a trailing semicolon:
// the property list must close at `}` on each line so the second custom
// element is tokenized at top level, not inside the first rule.
it('parses multiple lines of an incomplete property-list', () => {
const lines = grammar.tokenizeLines(`\
very-custom { color: inherit }
another-one { display: none; }\
`
);
expect(lines[0][0]).toEqual({value: 'very-custom', scopes: ['source.css.less', 'entity.name.tag.custom.css']});
expect(lines[0][4]).toEqual({value: 'color', scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']});
expect(lines[0][7]).toEqual({value: 'inherit', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']});
expect(lines[0][9]).toEqual({value: '}', scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']});
expect(lines[1][0]).toEqual({value: 'another-one', scopes: ['source.css.less', 'entity.name.tag.custom.css']});
expect(lines[1][10]).toEqual({value: '}', scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']});
});
// A LESS `@var` used as a property value: the `@` sigil and the variable
// name are separate tokens, both under `variable.other.less`.
it("parses variables", () => {
const {tokens} = grammar.tokenizeLine(".foo { border: @bar; }");
expect(tokens[0]).toEqual({value: ".", scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']});
expect(tokens[1]).toEqual({value: "foo", scopes: ['source.css.less', 'entity.other.attribute-name.class.css']});
expect(tokens[2]).toEqual({value: " ", scopes: ['source.css.less']});
expect(tokens[3]).toEqual({value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']});
expect(tokens[4]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[5]).toEqual({value: "border", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']});
expect(tokens[6]).toEqual({value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']});
expect(tokens[7]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[8]).toEqual({value: "@", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'variable.other.less', 'punctuation.definition.variable.less']});
expect(tokens[9]).toEqual({value: "bar", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'variable.other.less']});
expect(tokens[10]).toEqual({value: ";", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.terminator.rule.css']});
expect(tokens[11]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[12]).toEqual({value: "}", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']});
});
it("parses css variables", () => {
const {tokens} = grammar.tokenizeLine(".foo { --spacing-unit: 6px; }");
expect(tokens[0]).toEqual({value: ".", scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']});
expect(tokens[1]).toEqual({value: "foo", scopes: ['source.css.less', 'entity.other.attribute-name.class.css']});
expect(tokens[2]).toEqual({value: " ", scopes: ['source.css.less']});
expect(tokens[3]).toEqual({value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']});
expect(tokens[4]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[5]).toEqual({value: "--", scopes: ['source.css.less', 'meta.property-list.css', 'variable.other.less', 'punctuation.definition.variable.less']});
expect(tokens[6]).toEqual({value: "spacing-unit", scopes: ['source.css.less', 'meta.property-list.css', 'variable.other.less']});
expect(tokens[7]).toEqual({value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']});
expect(tokens[8]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[9]).toEqual({value: "6", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css']});
expect(tokens[10]).toEqual({value: "px", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css']});
expect(tokens[11]).toEqual({value: ";", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.terminator.rule.css']});
expect(tokens[12]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[13]).toEqual({value: "}", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']});
});
it('parses variable interpolation in selectors', () => {
const {tokens} = grammar.tokenizeLine('.@{selector} { color: #0ee; }');
expect(tokens[0]).toEqual({value: '.', scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']});
expect(tokens[1]).toEqual({value: '@{selector}', scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'variable.other.interpolation.less']});
expect(tokens[2]).toEqual({value: " ", scopes: ['source.css.less']});
expect(tokens[3]).toEqual({value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']});
expect(tokens[4]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
});
it('parses variable interpolation in properties', () => {
const {tokens} = grammar.tokenizeLine('.foo { @{property}: #0ee; }');
expect(tokens[0]).toEqual({value: ".", scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']});
expect(tokens[1]).toEqual({value: "foo", scopes: ['source.css.less', 'entity.other.attribute-name.class.css']});
expect(tokens[2]).toEqual({value: " ", scopes: ['source.css.less']});
expect(tokens[3]).toEqual({value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']});
expect(tokens[4]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[5]).toEqual({value: '@{property}', scopes: ['source.css.less', 'meta.property-list.css', 'variable.other.interpolation.less']});
expect(tokens[6]).toEqual({value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']});
expect(tokens[7]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
});
// `@{var}` interpolation inside a quoted url() argument: the interpolation
// keeps its own scope while the rest of the path stays a plain quoted string.
// Fix: the fixture previously ended with stray characters `";` after the
// closing `}` (`… }";`), which were accidental and unasserted; they are
// removed. All asserted token indices (tokens[8..14]) precede the old tail,
// so the expectations are unchanged.
it('parses variable interpolation in urls', () => {
const {tokens} = grammar.tokenizeLine('.foo { background: #F0F0F0 url("@{var}/img.png"); }');
expect(tokens[8]).toEqual({value: "#F0F0F0", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.rgb-value.css']});
expect(tokens[10]).toEqual({value: "url", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css']});
expect(tokens[11]).toEqual({value: "(", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'meta.brace.round.css']});
expect(tokens[13]).toEqual({value: "@{var}", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'string.quoted.double.css', 'variable.other.interpolation.less']});
expect(tokens[14]).toEqual({value: "/img.png", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'string.quoted.double.css']});
});
// `@{var}` interpolation inside a quoted @import path: the at-rule keyword,
// opening quote, and the interpolation token are each checked.
it('parses variable interpolation in imports', () => {
const {tokens} = grammar.tokenizeLine('@import "@{var}/tidal-wave.less";');
expect(tokens[0]).toEqual({value: "@", scopes: ['source.css.less', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.less', 'punctuation.definition.keyword.less']});
expect(tokens[1]).toEqual({value: "import", scopes: ['source.css.less', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.less']});
expect(tokens[2]).toEqual({value: " ", scopes: ['source.css.less', 'meta.at-rule.import.css']});
expect(tokens[3]).toEqual({value: "\"", scopes: ['source.css.less', 'meta.at-rule.import.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css']});
expect(tokens[4]).toEqual({value: "@{var}", scopes: ['source.css.less', 'meta.at-rule.import.css', 'string.quoted.double.css', 'variable.other.interpolation.less']});
});
// LESS import options `(optional, reference)` before the path: each option
// keyword gets the import.option scope, separated by a list comma.
it('parses options in import statements', () => {
const {tokens} = grammar.tokenizeLine('@import (optional, reference) "theme";');
expect(tokens[0]).toEqual({value: "@", scopes: ['source.css.less', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.less', 'punctuation.definition.keyword.less']});
expect(tokens[1]).toEqual({value: "import", scopes: ['source.css.less', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.less']});
expect(tokens[4]).toEqual({value: "optional", scopes: ['source.css.less', 'meta.at-rule.import.css', 'keyword.control.import.option.less']});
expect(tokens[5]).toEqual({value: ",", scopes: ['source.css.less', 'meta.at-rule.import.css', 'punctuation.separator.list.comma.css']});
expect(tokens[7]).toEqual({value: "reference", scopes: ['source.css.less', 'meta.at-rule.import.css', 'keyword.control.import.option.less']});
expect(tokens[11]).toEqual({value: "theme", scopes: ['source.css.less', 'meta.at-rule.import.css', 'string.quoted.double.css']});
});
// `rgba(...)` in a shorthand border value: the function name, parentheses,
// numeric arguments, and separating commas each carry meta.function.color.css.
it('parses built-in functions in property values', () => {
const {tokens} = grammar.tokenizeLine('.foo { border: 1px solid rgba(0,0,0); }');
expect(tokens[0]).toEqual({value: ".", scopes: ['source.css.less', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']});
expect(tokens[1]).toEqual({value: "foo", scopes: ['source.css.less', 'entity.other.attribute-name.class.css']});
expect(tokens[2]).toEqual({value: " ", scopes: ['source.css.less']});
expect(tokens[3]).toEqual({value: "{", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']});
expect(tokens[4]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[5]).toEqual({value: "border", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']});
expect(tokens[6]).toEqual({value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']});
expect(tokens[8]).toEqual({value: "1", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css']});
expect(tokens[9]).toEqual({value: "px", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css']});
expect(tokens[11]).toEqual({value: "solid", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']});
expect(tokens[13]).toEqual({value: "rgba", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'support.function.misc.css']});
expect(tokens[14]).toEqual({value: "(", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.section.function.begin.bracket.round.css']});
expect(tokens[15]).toEqual({value: "0", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css']});
expect(tokens[16]).toEqual({value: ",", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css']});
expect(tokens[17]).toEqual({value: "0", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css']});
expect(tokens[18]).toEqual({value: ",", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css']});
expect(tokens[21]).toEqual({value: ";", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.terminator.rule.css']});
});
// Gradient functions get meta.function.gradient.css + support.function.gradient.css.
it('parses linear-gradient', () => {
const {tokens} = grammar.tokenizeLine('.foo { background: linear-gradient(white, black); }');
expect(tokens[5]).toEqual({value: "background", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']});
expect(tokens[6]).toEqual({value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']});
expect(tokens[7]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[8]).toEqual({value: "linear-gradient", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.function.gradient.css']});
expect(tokens[9]).toEqual({value: "(", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.section.function.begin.bracket.round.css']});
});
// Transform functions get support.function.transform.css (note: no
// meta.function wrapper scope here, unlike the color/gradient cases above).
it('parses transform functions', () => {
const {tokens} = grammar.tokenizeLine('.foo { transform: scaleY(1); }');
expect(tokens[5]).toEqual({value: "transform", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']});
expect(tokens[6]).toEqual({value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']});
expect(tokens[7]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[8]).toEqual({value: "scaleY", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.transform.css']});
expect(tokens[9]).toEqual({value: "(", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'punctuation.section.function.begin.bracket.round.css']});
});
// Blend-mode keywords are plain property-value constants.
it('parses blend modes', () => {
const {tokens} = grammar.tokenizeLine('.foo { background-blend-mode: color-dodge; }');
expect(tokens[5]).toEqual({value: "background-blend-mode", scopes: ['source.css.less', 'meta.property-list.css', 'support.type.property-name.css']});
expect(tokens[6]).toEqual({value: ":", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.separator.key-value.css']});
expect(tokens[7]).toEqual({value: " ", scopes: ['source.css.less', 'meta.property-list.css']});
expect(tokens[8]).toEqual({value: "color-dodge", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']});
expect(tokens[9]).toEqual({value: ";", scopes: ['source.css.less', 'meta.property-list.css', 'punctuation.terminator.rule.css']});
});
// Unquoted url() argument (absolute, with a percent-escape) becomes one
// string.url.css token.
it('parses non-quoted urls', () => {
const {tokens} = grammar.tokenizeLine('.foo { background: url(http://%20/2.png) }');
expect(tokens[8]).toEqual({value: "url", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css']});
expect(tokens[9]).toEqual({value: "(", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'meta.brace.round.css']});
expect(tokens[10]).toEqual({value: "http://%20/2.png", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'string.url.css']});
});
// Same as above, but with a relative `../` path.
it('parses non-quoted relative urls', () => {
const {tokens} = grammar.tokenizeLine('.foo { background: url(../path/to/image.png) }');
expect(tokens[8]).toEqual({value: "url", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css']});
expect(tokens[9]).toEqual({value: "(", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'meta.brace.round.css']});
expect(tokens[10]).toEqual({value: "../path/to/image.png", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'string.url.css']});
});
// An unquoted url() followed by format(...): the url scope must close at `)`
// so `format` is picked up as a separate LESS builtin function.
it('parses non-quoted urls followed by a format', () => {
const {tokens} = grammar.tokenizeLine('@font-face { src: url(http://example.com/font.woff) format("woff"); }');
expect(tokens[8]).toEqual({value: 'url', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css']});
expect(tokens[9]).toEqual({value: "(", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'meta.brace.round.css']});
expect(tokens[10]).toEqual({value: "http://example.com/font.woff", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'string.url.css']});
expect(tokens[11]).toEqual({value: ")", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.url.css', 'meta.brace.round.css']});
expect(tokens[13]).toEqual({value: "format", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'support.function.any-method.builtin.less']});
});
// The LESS boolean literal `true` in a variable assignment.
it('parses the "true" value', () => {
const {tokens} = grammar.tokenizeLine('@var: true;');
expect(tokens[4]).toEqual({value: "true", scopes: ['source.css.less', 'constant.language.boolean.less']});
});
// Mixin guard syntax: `when`, `and`, `not` are logical-operator keywords;
// isnumber/ispixel are type/unit-checking builtins, `default` has its own
// scope, and `<` is a comparison operator. Indices are sparse — only the
// interesting tokens of the long guard expression are asserted.
it('parses mixin guards', () => {
const {tokens} = grammar.tokenizeLine('.mixin() when (isnumber(@b)) and (default()), (ispixel(@a)) and not (@a < 0) { }');
expect(tokens[4]).toEqual({value: "when", scopes: ['source.css.less', 'keyword.control.logical.operator.less']});
expect(tokens[7]).toEqual({value: "isnumber", scopes: ['source.css.less', 'support.function.type-checking.less']});
expect(tokens[14]).toEqual({value: "and", scopes: ['source.css.less', 'keyword.control.logical.operator.less']});
expect(tokens[17]).toEqual({value: "default", scopes: ['source.css.less', 'support.function.default.less']});
expect(tokens[21]).toEqual({value: ",", scopes: ['source.css.less', 'punctuation.separator.list.comma.css']});
expect(tokens[24]).toEqual({value: "ispixel", scopes: ['source.css.less', 'support.function.unit-checking.less']});
expect(tokens[31]).toEqual({value: "and", scopes: ['source.css.less', 'keyword.control.logical.operator.less']});
expect(tokens[33]).toEqual({value: "not", scopes: ['source.css.less', 'keyword.control.logical.operator.less']});
expect(tokens[39]).toEqual({value: "<", scopes: ['source.css.less', 'keyword.operator.less']});
});
// String tokenization inside property values: quote punctuation is scoped
// begin/end, the body keeps the quoted-string scope, and backslash escapes
// get constant.character.escape.css in both quote styles.
describe('strings', () => {
it('tokenizes single-quote strings', () => {
const {tokens} = grammar.tokenizeLine(".a { content: 'hi' }");
expect(tokens[8]).toEqual({value: "'", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css', 'punctuation.definition.string.begin.css']});
expect(tokens[9]).toEqual({value: 'hi', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css']});
expect(tokens[10]).toEqual({value: "'", scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css', 'punctuation.definition.string.end.css']});
});
it('tokenizes double-quote strings', () => {
const {tokens} = grammar.tokenizeLine('.a { content: "hi" }');
expect(tokens[8]).toEqual({value: '"', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css']});
expect(tokens[9]).toEqual({value: 'hi', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css']});
expect(tokens[10]).toEqual({value: '"', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css']});
});
it('tokenizes escape characters', () => {
// First tokenization uses single quotes; the second re-tokenizes the
// double-quoted variant, rebinding `tokens` via destructuring assignment.
let {tokens} = grammar.tokenizeLine(".a { content: '\\abcdef' }");
expect(tokens[9]).toEqual({value: '\\abcdef', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css', 'constant.character.escape.css']});
({tokens} = grammar.tokenizeLine('.a { content: "\\abcdef" }'));
expect(tokens[9]).toEqual({value: '\\abcdef', scopes: ['source.css.less', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'constant.character.escape.css']});
});
});
});

View File

@ -1,251 +0,0 @@
# NOTE: This spec file doesn't use Coffeescript extended quotes (""")
# because Make does not support spaces for indentation (which this spec file is using)
# So we have to settle with \n\t single-line notation
describe "Makefile grammar", ->
# Shared handle to the Makefile grammar; assigned once the package activates.
grammar = null
# Activate language-make and resolve the grammar before every spec.
beforeEach ->
waitsForPromise ->
atom.packages.activatePackage("language-make")
runs ->
grammar = atom.grammars.grammarForScopeName("source.makefile")
# Sanity check: the grammar loaded and reports the expected scope name.
it "parses the grammar", ->
expect(grammar).toBeTruthy()
expect(grammar.scopeName).toBe "source.makefile"
# Hashbang detection: a file starting with `#!/usr/bin/make -f` should be
# assigned this grammar even with no file name.
it "selects the Makefile grammar for files that start with a hashbang make -f command", ->
expect(atom.grammars.selectGrammar('', '#!/usr/bin/make -f')).toBe grammar
it "parses comments correctly", ->
lines = grammar.tokenizeLines '#foo\n\t#bar\n#foo\\\nbar'
expect(lines[0][0]).toEqual value: '#', scopes: ['source.makefile', 'comment.line.number-sign.makefile', 'punctuation.definition.comment.makefile']
expect(lines[0][1]).toEqual value: 'foo', scopes: ['source.makefile', 'comment.line.number-sign.makefile']
expect(lines[1][0]).toEqual value: '\t', scopes: ['source.makefile', 'punctuation.whitespace.comment.leading.makefile']
expect(lines[1][1]).toEqual value: '#', scopes: ['source.makefile', 'comment.line.number-sign.makefile', 'punctuation.definition.comment.makefile']
expect(lines[1][2]).toEqual value: 'bar', scopes: ['source.makefile', 'comment.line.number-sign.makefile']
expect(lines[2][0]).toEqual value: '#', scopes: ['source.makefile', 'comment.line.number-sign.makefile', 'punctuation.definition.comment.makefile']
expect(lines[2][1]).toEqual value: 'foo', scopes: ['source.makefile', 'comment.line.number-sign.makefile']
expect(lines[2][2]).toEqual value: '\\', scopes: ['source.makefile', 'comment.line.number-sign.makefile', 'constant.character.escape.continuation.makefile']
expect(lines[3][0]).toEqual value: 'bar', scopes: ['source.makefile', 'comment.line.number-sign.makefile']
lines = grammar.tokenizeLines '# comment\\\nshould still be a comment\nnot a comment'
expect(lines[0][0]).toEqual value: '#', scopes: ['source.makefile', 'comment.line.number-sign.makefile', 'punctuation.definition.comment.makefile']
expect(lines[0][1]).toEqual value: ' comment', scopes: ['source.makefile', 'comment.line.number-sign.makefile']
expect(lines[0][2]).toEqual value: '\\', scopes: ['source.makefile', 'comment.line.number-sign.makefile', 'constant.character.escape.continuation.makefile']
expect(lines[1][0]).toEqual value: 'should still be a comment', scopes: ['source.makefile', 'comment.line.number-sign.makefile']
expect(lines[2][0]).toEqual value: 'not a comment', scopes: ['source.makefile']
it "parses recipes", ->
waitsForPromise ->
atom.packages.activatePackage("language-shellscript")
runs ->
lines = grammar.tokenizeLines 'all: foo.bar\n\ttest\n\nclean: foo\n\trm -fr foo.bar'
expect(lines[0][0]).toEqual value: 'all', scopes: ['source.makefile', 'meta.scope.target.makefile', 'entity.name.function.target.makefile']
expect(lines[3][0]).toEqual value: 'clean', scopes: ['source.makefile', 'meta.scope.target.makefile', 'entity.name.function.target.makefile']
# TODO: Enable these specs after language-shellscript@0.25.0 is on stable
# lines = grammar.tokenizeLines 'help: # Show this help\n\t@command grep --extended-regexp \'^[a-zA-Z_-]+:.*?# .*$$\' $(MAKEFILE_LIST) | sort | awk \'BEGIN {FS = ":.*?# "}; {printf "\\033[1;39m%-15s\\033[0;39m %s\\n", $$1, $$2}\''
# expect(lines[0][0]).toEqual value: 'help', scopes: ['source.makefile', 'meta.scope.target.makefile', 'entity.name.function.target.makefile']
# expect(lines[0][1]).toEqual value: ':', scopes: ['source.makefile', 'meta.scope.target.makefile', 'punctuation.separator.key-value.makefile']
# expect(lines[0][3]).toEqual value: '#', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.prerequisites.makefile', 'comment.line.number-sign.makefile', 'punctuation.definition.comment.makefile']
# expect(lines[1][0]).toEqual value: '\t', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile']
# expect(lines[1][1]).toEqual value: '@command grep --extended-regexp ', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile']
# expect(lines[1][2]).toEqual value: '\'', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.quoted.single.shell', 'punctuation.definition.string.begin.shell']
# Helper shared by the `parses \`X\` correctly` specs below: tokenizes
# `foo: echo $(<name> /foo/bar.txt)` and asserts that token 4 is the builtin
# function name, scoped as a function call inside the prerequisites list.
testFunctionCall = (fnName) ->
  {tokens} = grammar.tokenizeLine "foo: echo $(#{fnName} /foo/bar.txt)"
  expect(tokens[4]).toEqual value: fnName, scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.prerequisites.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', "support.function.#{fnName}.makefile"]
it "parses `subst` correctly", ->
testFunctionCall('subst')
it "parses `patsubst` correctly", ->
testFunctionCall('patsubst')
it "parses `strip` correctly", ->
testFunctionCall('strip')
it "parses `findstring` correctly", ->
testFunctionCall('findstring')
it "parses `filter` correctly", ->
testFunctionCall('filter')
it "parses `sort` correctly", ->
testFunctionCall('sort')
it "parses `word` correctly", ->
testFunctionCall('word')
it "parses `wordlist` correctly", ->
testFunctionCall('wordlist')
it "parses `firstword` correctly", ->
testFunctionCall('firstword')
it "parses `lastword` correctly", ->
testFunctionCall('lastword')
it "parses `dir` correctly", ->
testFunctionCall('dir')
it "parses `notdir` correctly", ->
testFunctionCall('notdir')
it "parses `suffix` correctly", ->
testFunctionCall('suffix')
it "parses `basename` correctly", ->
testFunctionCall('basename')
it "parses `addsuffix` correctly", ->
testFunctionCall('addsuffix')
it "parses `addprefix` correctly", ->
testFunctionCall('addprefix')
it "parses `join` correctly", ->
testFunctionCall('join')
it "parses `wildcard` correctly", ->
testFunctionCall('wildcard')
it "parses `realpath` correctly", ->
testFunctionCall('realpath')
it "parses `abspath` correctly", ->
testFunctionCall('abspath')
it "parses `if` correctly", ->
testFunctionCall('if')
it "parses `or` correctly", ->
testFunctionCall('or')
it "parses `and` correctly", ->
testFunctionCall('and')
it "parses `foreach` correctly", ->
testFunctionCall('foreach')
it "parses `file` correctly", ->
testFunctionCall('file')
it "parses `call` correctly", ->
testFunctionCall('call')
it "parses `value` correctly", ->
testFunctionCall('value')
it "parses `eval` correctly", ->
testFunctionCall('eval')
it "parses `error` correctly", ->
testFunctionCall('error')
it "parses `warning` correctly", ->
testFunctionCall('warning')
# NOTE(review): leading indentation appears flattened by the diff rendering;
# CoffeeScript is indentation-sensitive, so the original nesting must be
# confirmed against the repository copy of this spec.
# Each one-liner below delegates to a shared testFunctionCall helper
# (defined earlier in this describe block) that asserts the named Make
# function gets the support.function.<name>.makefile scope.
it "parses `info` correctly", ->
testFunctionCall('info')
it "parses `shell` correctly", ->
testFunctionCall('shell')
it "parses `guile` correctly", ->
testFunctionCall('guile')
# A function call inside a recipe line ('\t'-prefixed body) should still be
# scoped as a makefile function call nested in the recipe scope chain.
it "parses targets with line breaks in body", ->
lines = grammar.tokenizeLines 'foo:\n\techo $(basename /foo/bar.txt)'
expect(lines[1][3]).toEqual value: 'basename', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'support.function.basename.makefile']
# A trailing backslash in the prerequisites list continues the list on the
# next line; the recipe that follows is tokenized by the shell grammar
# (hence language-shellscript is activated first).
it "continues matching prerequisites after reaching a line continuation character", ->
waitsForPromise ->
atom.packages.activatePackage("language-shellscript")
runs ->
lines = grammar.tokenizeLines 'hello: a b c \\\n d e f\n\techo "test"'
expect(lines[0][3]).toEqual value: '\\', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.prerequisites.makefile', 'constant.character.escape.continuation.makefile']
expect(lines[1][0]).toEqual value: ' d e f', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.prerequisites.makefile']
expect(lines[2][1]).toEqual value: 'echo', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'support.function.builtin.shell']
# $(eval ... $(shell ...)) nesting: each level adds another
# string.interpolated.makefile / meta.scope.function-call.makefile pair to
# the scope chain, and the embedded double-quoted shell string keeps its
# shell scopes inside the makefile ones.
it "parses nested interpolated strings and function calls correctly", ->
waitsForPromise ->
atom.packages.activatePackage("language-shellscript")
runs ->
lines = grammar.tokenizeLines 'default:\n\t$(eval MESSAGE=$(shell node -pe "decodeURIComponent(process.argv.pop())" "${MSG}"))'
expect(lines[1][1]).toEqual value: '$(', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']
expect(lines[1][2]).toEqual value: 'eval', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'support.function.eval.makefile']
expect(lines[1][5]).toEqual value: '$(', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']
expect(lines[1][6]).toEqual value: 'shell', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'support.function.shell.makefile']
expect(lines[1][9]).toEqual value: '"', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.quoted.double.shell', 'punctuation.definition.string.begin.shell']
expect(lines[1][10]).toEqual value: 'decodeURIComponent(process.argv.pop())', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.quoted.double.shell']
expect(lines[1][11]).toEqual value: '"', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.quoted.double.shell', 'punctuation.definition.string.end.shell']
expect(lines[1][14]).toEqual value: '${', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.quoted.double.shell', 'variable.other.bracket.shell', 'punctuation.definition.variable.shell']
expect(lines[1][16]).toEqual value: '}', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.quoted.double.shell', 'variable.other.bracket.shell', 'punctuation.definition.variable.shell']
expect(lines[1][18]).toEqual value: ')', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']
expect(lines[1][19]).toEqual value: ')', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']
# Function calls outside any target (top level of the makefile) still get
# the interpolated-string scope, without target/recipe scopes.
it "recognizes global functions", ->
{tokens} = grammar.tokenizeLine '$(foreach util,$(EXES),$(eval $(call BUILD_EXE,$(util))))'
expect(tokens[0]).toEqual value: '$(', scopes: ['source.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']
it "parses `origin` correctly", ->
waitsForPromise ->
atom.packages.activatePackage("language-shellscript")
runs ->
lines = grammar.tokenizeLines 'default:\n\t$(origin 1)'
expect(lines[1][1]).toEqual value: '$(', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']
expect(lines[1][2]).toEqual value: 'origin', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'support.function.origin.makefile']
expect(lines[1][4]).toEqual value: '1', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'variable.other.makefile']
expect(lines[1][5]).toEqual value: ')', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']
it "parses `flavor` correctly", ->
waitsForPromise ->
atom.packages.activatePackage("language-shellscript")
runs ->
lines = grammar.tokenizeLines 'default:\n\t$(flavor 1)'
expect(lines[1][1]).toEqual value: '$(', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']
expect(lines[1][2]).toEqual value: 'flavor', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'support.function.flavor.makefile']
expect(lines[1][4]).toEqual value: '1', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'variable.other.makefile']
expect(lines[1][5]).toEqual value: ')', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']
# Covers all four assignment operators, a variable name containing
# punctuation, a backslash line continuation in the value, and a trailing
# comment after the value.
it "tokenizes variable assignments", ->
operators = ['=', '?=', ':=', '+=']
for operator in operators
{tokens} = grammar.tokenizeLine "SOMEVAR #{operator} whatever"
expect(tokens[0]).toEqual value: 'SOMEVAR', scopes: ['source.makefile', 'variable.other.makefile']
expect(tokens[1]).toEqual value: ' ', scopes: ['source.makefile']
expect(tokens[2]).toEqual value: operator, scopes: ['source.makefile', 'keyword.operator.assignment.makefile']
expect(tokens[3]).toEqual value: ' whatever', scopes: ['source.makefile']
{tokens} = grammar.tokenizeLine '`$om3_V@R! := whatever'
expect(tokens[0]).toEqual value: '`$om3_V@R!', scopes: ['source.makefile', 'variable.other.makefile']
expect(tokens[1]).toEqual value: ' ', scopes: ['source.makefile']
expect(tokens[2]).toEqual value: ':=', scopes: ['source.makefile', 'keyword.operator.assignment.makefile']
expect(tokens[3]).toEqual value: ' whatever', scopes: ['source.makefile']
lines = grammar.tokenizeLines 'SOMEVAR = OTHER\\\nVAR'
expect(lines[0][0]).toEqual value: 'SOMEVAR', scopes: ['source.makefile', 'variable.other.makefile']
expect(lines[0][3]).toEqual value: ' OTHER', scopes: ['source.makefile']
expect(lines[0][4]).toEqual value: '\\', scopes: ['source.makefile', 'constant.character.escape.continuation.makefile']
lines = grammar.tokenizeLines 'SOMEVAR := foo # bar explanation\nOTHERVAR := bar'
expect(lines[0][0]).toEqual value: 'SOMEVAR', scopes: ['source.makefile', 'variable.other.makefile']
expect(lines[0][4]).toEqual value: '#', scopes: ['source.makefile', 'comment.line.number-sign.makefile', 'punctuation.definition.comment.makefile']
expect(lines[1][0]).toEqual value: 'OTHERVAR', scopes: ['source.makefile', 'variable.other.makefile']

View File

@ -0,0 +1,226 @@
// NOTE: This spec file doesn't use Coffeescript extended quotes (""")
// because Make does not support spaces for indentation (which this spec file is using)
// So we have to settle with \n\t single-line notation
//
// NOTE(review): this is the decaffeinated (JS) counterpart of the CoffeeScript
// spec; each test asserts exact token values and TextMate scope chains
// produced by tokenizing Makefile snippets, so token indices are sensitive
// to any grammar change. Indentation appears flattened by the diff
// rendering; confirm nesting against the repository copy.
describe("Makefile grammar", () => {
let grammar = null;
// Activate the language-make package and fetch its grammar before each test.
beforeEach(() => {
waitsForPromise(() => atom.packages.activatePackage("language-make"));
runs(() => grammar = atom.grammars.grammarForScopeName("source.makefile"));
});
it("parses the grammar", () => {
expect(grammar).toBeTruthy();
expect(grammar.scopeName).toBe("source.makefile");
});
it("selects the Makefile grammar for files that start with a hashbang make -f command", () => expect(atom.grammars.selectGrammar('', '#!/usr/bin/make -f')).toBe(grammar));
// Comments: leading-whitespace handling, and backslash continuation of a
// comment onto the next line (line 4 of the first fixture is NOT continued).
it("parses comments correctly", () => {
let lines = grammar.tokenizeLines('#foo\n\t#bar\n#foo\\\nbar');
expect(lines[0][0]).toEqual({value: '#', scopes: ['source.makefile', 'comment.line.number-sign.makefile', 'punctuation.definition.comment.makefile']});
expect(lines[0][1]).toEqual({value: 'foo', scopes: ['source.makefile', 'comment.line.number-sign.makefile']});
expect(lines[1][0]).toEqual({value: '\t', scopes: ['source.makefile', 'punctuation.whitespace.comment.leading.makefile']});
expect(lines[1][1]).toEqual({value: '#', scopes: ['source.makefile', 'comment.line.number-sign.makefile', 'punctuation.definition.comment.makefile']});
expect(lines[1][2]).toEqual({value: 'bar', scopes: ['source.makefile', 'comment.line.number-sign.makefile']});
expect(lines[2][0]).toEqual({value: '#', scopes: ['source.makefile', 'comment.line.number-sign.makefile', 'punctuation.definition.comment.makefile']});
expect(lines[2][1]).toEqual({value: 'foo', scopes: ['source.makefile', 'comment.line.number-sign.makefile']});
expect(lines[2][2]).toEqual({value: '\\', scopes: ['source.makefile', 'comment.line.number-sign.makefile', 'constant.character.escape.continuation.makefile']});
expect(lines[3][0]).toEqual({value: 'bar', scopes: ['source.makefile', 'comment.line.number-sign.makefile']});
lines = grammar.tokenizeLines('# comment\\\nshould still be a comment\nnot a comment');
expect(lines[0][0]).toEqual({value: '#', scopes: ['source.makefile', 'comment.line.number-sign.makefile', 'punctuation.definition.comment.makefile']});
expect(lines[0][1]).toEqual({value: ' comment', scopes: ['source.makefile', 'comment.line.number-sign.makefile']});
expect(lines[0][2]).toEqual({value: '\\', scopes: ['source.makefile', 'comment.line.number-sign.makefile', 'constant.character.escape.continuation.makefile']});
expect(lines[1][0]).toEqual({value: 'should still be a comment', scopes: ['source.makefile', 'comment.line.number-sign.makefile']});
expect(lines[2][0]).toEqual({value: 'not a comment', scopes: ['source.makefile']});
});
// Recipes require language-shellscript because recipe bodies are tokenized
// with the embedded shell grammar.
it("parses recipes", () => {
waitsForPromise(() => atom.packages.activatePackage("language-shellscript"));
runs(() => {
const lines = grammar.tokenizeLines('all: foo.bar\n\ttest\n\nclean: foo\n\trm -fr foo.bar');
expect(lines[0][0]).toEqual({value: 'all', scopes: ['source.makefile', 'meta.scope.target.makefile', 'entity.name.function.target.makefile']});
expect(lines[3][0]).toEqual({value: 'clean', scopes: ['source.makefile', 'meta.scope.target.makefile', 'entity.name.function.target.makefile']});});
});
// TODO: Enable these specs after language-shellscript@0.25.0 is on stable
// lines = grammar.tokenizeLines 'help: # Show this help\n\t@command grep --extended-regexp \'^[a-zA-Z_-]+:.*?# .*$$\' $(MAKEFILE_LIST) | sort | awk \'BEGIN {FS = ":.*?# "}; {printf "\\033[1;39m%-15s\\033[0;39m %s\\n", $$1, $$2}\''
// expect(lines[0][0]).toEqual value: 'help', scopes: ['source.makefile', 'meta.scope.target.makefile', 'entity.name.function.target.makefile']
// expect(lines[0][1]).toEqual value: ':', scopes: ['source.makefile', 'meta.scope.target.makefile', 'punctuation.separator.key-value.makefile']
// expect(lines[0][3]).toEqual value: '#', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.prerequisites.makefile', 'comment.line.number-sign.makefile', 'punctuation.definition.comment.makefile']
// expect(lines[1][0]).toEqual value: '\t', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile']
// expect(lines[1][1]).toEqual value: '@command grep --extended-regexp ', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile']
// expect(lines[1][2]).toEqual value: '\'', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.quoted.single.shell', 'punctuation.definition.string.begin.shell']
// Shared helper for the one-liner tests below: tokenizes a target line that
// calls $(<functionName> ...) and asserts the function name gets the
// support.function.<functionName>.makefile scope. Defined before the `it`
// callbacks run, so the forward placement relative to "parses recipes" is fine.
const testFunctionCall = function(functionName) {
const {tokens} = grammar.tokenizeLine('foo: echo $(' + functionName + ' /foo/bar.txt)');
expect(tokens[4]).toEqual({value: functionName, scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.prerequisites.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'support.function.' + functionName + '.makefile']});
};
// One test per GNU Make built-in function name.
it("parses `subst` correctly", () => testFunctionCall('subst'));
it("parses `patsubst` correctly", () => testFunctionCall('patsubst'));
it("parses `strip` correctly", () => testFunctionCall('strip'));
it("parses `findstring` correctly", () => testFunctionCall('findstring'));
it("parses `filter` correctly", () => testFunctionCall('filter'));
it("parses `sort` correctly", () => testFunctionCall('sort'));
it("parses `word` correctly", () => testFunctionCall('word'));
it("parses `wordlist` correctly", () => testFunctionCall('wordlist'));
it("parses `firstword` correctly", () => testFunctionCall('firstword'));
it("parses `lastword` correctly", () => testFunctionCall('lastword'));
it("parses `dir` correctly", () => testFunctionCall('dir'));
it("parses `notdir` correctly", () => testFunctionCall('notdir'));
it("parses `suffix` correctly", () => testFunctionCall('suffix'));
it("parses `basename` correctly", () => testFunctionCall('basename'));
it("parses `addsuffix` correctly", () => testFunctionCall('addsuffix'));
it("parses `addprefix` correctly", () => testFunctionCall('addprefix'));
it("parses `join` correctly", () => testFunctionCall('join'));
it("parses `wildcard` correctly", () => testFunctionCall('wildcard'));
it("parses `realpath` correctly", () => testFunctionCall('realpath'));
it("parses `abspath` correctly", () => testFunctionCall('abspath'));
it("parses `if` correctly", () => testFunctionCall('if'));
it("parses `or` correctly", () => testFunctionCall('or'));
it("parses `and` correctly", () => testFunctionCall('and'));
it("parses `foreach` correctly", () => testFunctionCall('foreach'));
it("parses `file` correctly", () => testFunctionCall('file'));
it("parses `call` correctly", () => testFunctionCall('call'));
it("parses `value` correctly", () => testFunctionCall('value'));
it("parses `eval` correctly", () => testFunctionCall('eval'));
it("parses `error` correctly", () => testFunctionCall('error'));
it("parses `warning` correctly", () => testFunctionCall('warning'));
it("parses `info` correctly", () => testFunctionCall('info'));
it("parses `shell` correctly", () => testFunctionCall('shell'));
it("parses `guile` correctly", () => testFunctionCall('guile'));
it("parses targets with line breaks in body", () => {
const lines = grammar.tokenizeLines('foo:\n\techo $(basename /foo/bar.txt)');
expect(lines[1][3]).toEqual({value: 'basename', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'support.function.basename.makefile']});
});
// A backslash at the end of the prerequisites list continues it on the
// following line; the recipe body is then tokenized by the shell grammar.
it("continues matching prerequisites after reaching a line continuation character", () => {
waitsForPromise(() => atom.packages.activatePackage("language-shellscript"));
runs(() => {
const lines = grammar.tokenizeLines('hello: a b c \\\n d e f\n\techo "test"');
expect(lines[0][3]).toEqual({value: '\\', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.prerequisites.makefile', 'constant.character.escape.continuation.makefile']});
expect(lines[1][0]).toEqual({value: ' d e f', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.prerequisites.makefile']});
expect(lines[2][1]).toEqual({value: 'echo', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'support.function.builtin.shell']});});
});
// Each nesting level of $(eval ... $(shell ...)) appends another
// string.interpolated.makefile / meta.scope.function-call.makefile pair to
// the scope chain; the embedded double-quoted shell string keeps its shell
// scopes inside the makefile ones.
it("parses nested interpolated strings and function calls correctly", () => {
waitsForPromise(() => atom.packages.activatePackage("language-shellscript"));
runs(() => {
const lines = grammar.tokenizeLines('default:\n\t$(eval MESSAGE=$(shell node -pe "decodeURIComponent(process.argv.pop())" "${MSG}"))');
expect(lines[1][1]).toEqual({value: '$(', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']});
expect(lines[1][2]).toEqual({value: 'eval', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'support.function.eval.makefile']});
expect(lines[1][5]).toEqual({value: '$(', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']});
expect(lines[1][6]).toEqual({value: 'shell', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'support.function.shell.makefile']});
expect(lines[1][9]).toEqual({value: '"', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.quoted.double.shell', 'punctuation.definition.string.begin.shell']});
expect(lines[1][10]).toEqual({value: 'decodeURIComponent(process.argv.pop())', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.quoted.double.shell']});
expect(lines[1][11]).toEqual({value: '"', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.quoted.double.shell', 'punctuation.definition.string.end.shell']});
expect(lines[1][14]).toEqual({value: '${', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.quoted.double.shell', 'variable.other.bracket.shell', 'punctuation.definition.variable.shell']});
expect(lines[1][16]).toEqual({value: '}', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.quoted.double.shell', 'variable.other.bracket.shell', 'punctuation.definition.variable.shell']});
expect(lines[1][18]).toEqual({value: ')', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']});
expect(lines[1][19]).toEqual({value: ')', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']});});
});
// A function call at the top level of a makefile (outside any target) still
// gets the interpolated-string scope, without target/recipe scopes.
it("recognizes global functions", () => {
const {tokens} = grammar.tokenizeLine('$(foreach util,$(EXES),$(eval $(call BUILD_EXE,$(util))))');
expect(tokens[0]).toEqual({value: '$(', scopes: ['source.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']});
});
it("parses `origin` correctly", () => {
waitsForPromise(() => atom.packages.activatePackage("language-shellscript"));
runs(() => {
const lines = grammar.tokenizeLines('default:\n\t$(origin 1)');
expect(lines[1][1]).toEqual({value: '$(', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']});
expect(lines[1][2]).toEqual({value: 'origin', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'support.function.origin.makefile']});
expect(lines[1][4]).toEqual({value: '1', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'variable.other.makefile']});
expect(lines[1][5]).toEqual({value: ')', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']});});
});
it("parses `flavor` correctly", () => {
waitsForPromise(() => atom.packages.activatePackage("language-shellscript"));
runs(() => {
const lines = grammar.tokenizeLines('default:\n\t$(flavor 1)');
expect(lines[1][1]).toEqual({value: '$(', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']});
expect(lines[1][2]).toEqual({value: 'flavor', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'support.function.flavor.makefile']});
expect(lines[1][4]).toEqual({value: '1', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'meta.scope.function-call.makefile', 'variable.other.makefile']});
expect(lines[1][5]).toEqual({value: ')', scopes: ['source.makefile', 'meta.scope.target.makefile', 'meta.scope.recipe.makefile', 'string.interpolated.makefile', 'punctuation.definition.variable.makefile']});});
});
// Covers all four assignment operators, a variable name containing
// punctuation, a backslash line continuation in the value, and a trailing
// comment after the value.
it("tokenizes variable assignments", () => {
let tokens;
const operators = ['=', '?=', ':=', '+='];
for (let operator of Array.from(operators)) {
({tokens} = grammar.tokenizeLine(`SOMEVAR ${operator} whatever`));
expect(tokens[0]).toEqual({value: 'SOMEVAR', scopes: ['source.makefile', 'variable.other.makefile']});
expect(tokens[1]).toEqual({value: ' ', scopes: ['source.makefile']});
expect(tokens[2]).toEqual({value: operator, scopes: ['source.makefile', 'keyword.operator.assignment.makefile']});
expect(tokens[3]).toEqual({value: ' whatever', scopes: ['source.makefile']});
}
({tokens} = grammar.tokenizeLine('`$om3_V@R! := whatever'));
expect(tokens[0]).toEqual({value: '`$om3_V@R!', scopes: ['source.makefile', 'variable.other.makefile']});
expect(tokens[1]).toEqual({value: ' ', scopes: ['source.makefile']});
expect(tokens[2]).toEqual({value: ':=', scopes: ['source.makefile', 'keyword.operator.assignment.makefile']});
expect(tokens[3]).toEqual({value: ' whatever', scopes: ['source.makefile']});
let lines = grammar.tokenizeLines('SOMEVAR = OTHER\\\nVAR');
expect(lines[0][0]).toEqual({value: 'SOMEVAR', scopes: ['source.makefile', 'variable.other.makefile']});
expect(lines[0][3]).toEqual({value: ' OTHER', scopes: ['source.makefile']});
expect(lines[0][4]).toEqual({value: '\\', scopes: ['source.makefile', 'constant.character.escape.continuation.makefile']});
lines = grammar.tokenizeLines('SOMEVAR := foo # bar explanation\nOTHERVAR := bar');
expect(lines[0][0]).toEqual({value: 'SOMEVAR', scopes: ['source.makefile', 'variable.other.makefile']});
expect(lines[0][4]).toEqual({value: '#', scopes: ['source.makefile', 'comment.line.number-sign.makefile', 'punctuation.definition.comment.makefile']});
expect(lines[1][0]).toEqual({value: 'OTHERVAR', scopes: ['source.makefile', 'variable.other.makefile']});
});
});

View File

@ -1,104 +0,0 @@
# Spec for the language-mustache TextMate grammar (text.html.mustache).
# Each test tokenizes a Mustache/HTML snippet and asserts exact token values
# and scope chains, so token indices are sensitive to grammar changes.
# NOTE(review): leading indentation appears flattened by the diff rendering;
# CoffeeScript is indentation-sensitive — confirm nesting against the
# repository copy of this spec.
describe 'Mustache grammar', ->
grammar = null
# language-html is activated too because the Mustache grammar embeds HTML.
beforeEach ->
waitsForPromise ->
atom.packages.activatePackage('language-html')
waitsForPromise ->
atom.packages.activatePackage('language-mustache')
runs ->
grammar = atom.grammars.grammarForScopeName('text.html.mustache')
it 'parses the grammar', ->
expect(grammar).toBeTruthy()
expect(grammar.scopeName).toBe 'text.html.mustache'
it 'parses expressions', ->
{tokens} = grammar.tokenizeLine("{{name}}")
expect(tokens[0]).toEqual value: '{{', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']
expect(tokens[1]).toEqual value: 'name', scopes: ['text.html.mustache', 'meta.tag.template.mustache']
expect(tokens[2]).toEqual value: '}}', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']
# A Mustache expression inside a quoted HTML attribute keeps both the HTML
# string scopes and the Mustache tag scopes.
it 'parses expressions in HTML attributes', ->
{tokens} = grammar.tokenizeLine("<a href='{{test}}'></a>")
expect(tokens[6]).toEqual value: '{{', scopes: ['text.html.mustache', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.single.html', 'meta.tag.template.mustache', 'entity.name.tag.mustache']
expect(tokens[8]).toEqual value: '}}', scopes: ['text.html.mustache', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.single.html', 'meta.tag.template.mustache', 'entity.name.tag.mustache']
expect(tokens[9]).toEqual value: "'", scopes: ['text.html.mustache', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.single.html', 'punctuation.definition.string.end.html']
it 'parses block comments', ->
{tokens} = grammar.tokenizeLine("{{!--{{comment}}--}}")
expect(tokens[0]).toEqual value: '{{!--', scopes: ['text.html.mustache', 'comment.block.mustache', 'punctuation.definition.comment.mustache']
expect(tokens[1]).toEqual value: '{{comment}}', scopes: ['text.html.mustache', 'comment.block.mustache']
expect(tokens[2]).toEqual value: '--}}', scopes: ['text.html.mustache', 'comment.block.mustache', 'punctuation.definition.comment.mustache']
it 'parses comments', ->
{tokens} = grammar.tokenizeLine("{{!comment}}")
expect(tokens[0]).toEqual value: '{{!', scopes: ['text.html.mustache', 'comment.block.mustache', 'punctuation.definition.comment.mustache']
expect(tokens[1]).toEqual value: 'comment', scopes: ['text.html.mustache', 'comment.block.mustache']
expect(tokens[2]).toEqual value: '}}', scopes: ['text.html.mustache', 'comment.block.mustache', 'punctuation.definition.comment.mustache']
# Section openers (#), inverted sections (^) and closers (/) each get a
# punctuation.definition.block.{begin,end}.mustache scope on the sigil and
# entity.name.function.mustache on the section name.
it 'parses block expression', ->
{tokens} = grammar.tokenizeLine("{{#each people}}")
expect(tokens[0]).toEqual value: '{{', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']
expect(tokens[1]).toEqual value: '#', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache', 'punctuation.definition.block.begin.mustache']
expect(tokens[2]).toEqual value: 'each', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache', 'entity.name.function.mustache']
expect(tokens[3]).toEqual value: ' people', scopes: ['text.html.mustache', 'meta.tag.template.mustache']
expect(tokens[4]).toEqual value: '}}', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']
{tokens} = grammar.tokenizeLine("{{# nested.block }}")
expect(tokens[0]).toEqual value: '{{', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']
expect(tokens[1]).toEqual value: '#', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache', 'punctuation.definition.block.begin.mustache']
expect(tokens[3]).toEqual value: 'nested.block', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache', 'entity.name.function.mustache']
expect(tokens[5]).toEqual value: '}}', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']
{tokens} = grammar.tokenizeLine("{{^repo}}")
expect(tokens[0]).toEqual value: '{{', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']
expect(tokens[1]).toEqual value: '^', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache', 'punctuation.definition.block.begin.mustache']
expect(tokens[2]).toEqual value: 'repo', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache', 'entity.name.function.mustache']
expect(tokens[3]).toEqual value: '}}', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']
{tokens} = grammar.tokenizeLine("{{/if}}")
expect(tokens[0]).toEqual value: '{{', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']
expect(tokens[1]).toEqual value: '/', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache', 'punctuation.definition.block.end.mustache']
expect(tokens[2]).toEqual value: 'if', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache', 'entity.name.function.mustache']
expect(tokens[3]).toEqual value: '}}', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']
# Triple-stache {{{...}}} gets the .raw variant of the template scope.
it 'parses unescaped expressions', ->
{tokens} = grammar.tokenizeLine("{{{do not escape me}}}")
expect(tokens[0]).toEqual value: '{{{', scopes: ['text.html.mustache', 'meta.tag.template.raw.mustache', 'entity.name.tag.mustache']
expect(tokens[1]).toEqual value: 'do not escape me', scopes: ['text.html.mustache', 'meta.tag.template.raw.mustache']
expect(tokens[2]).toEqual value: '}}}', scopes: ['text.html.mustache', 'meta.tag.template.raw.mustache', 'entity.name.tag.mustache']
# The first '}}' closes the tag, so an inner '{{' is plain tag content and
# the trailing '}}' falls outside any Mustache scope.
it 'does not tokenize tags within tags', ->
{tokens} = grammar.tokenizeLine("{{test{{test}}}}")
expect(tokens[0]).toEqual value: '{{', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']
expect(tokens[1]).toEqual value: 'test{{test', scopes: ['text.html.mustache', 'meta.tag.template.mustache']
expect(tokens[2]).toEqual value: '}}', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']
expect(tokens[3]).toEqual value: '}}', scopes: ['text.html.mustache']
it 'does not tokenize comments within comments', ->
{tokens} = grammar.tokenizeLine("{{!test{{!test}}}}")
expect(tokens[0]).toEqual value: '{{!', scopes: ['text.html.mustache', 'comment.block.mustache', 'punctuation.definition.comment.mustache']
expect(tokens[1]).toEqual value: 'test{{!test', scopes: ['text.html.mustache', 'comment.block.mustache']
expect(tokens[2]).toEqual value: '}}', scopes: ['text.html.mustache', 'comment.block.mustache', 'punctuation.definition.comment.mustache']
expect(tokens[3]).toEqual value: '}}', scopes: ['text.html.mustache']
# Inside an HTML comment the Mustache expression stays plain comment text.
it 'does not tokenize Mustache expressions inside HTML comments', ->
{tokens} = grammar.tokenizeLine("<!--{{test}}-->")
expect(tokens[0]).toEqual value: '<!--', scopes: ['text.html.mustache', 'comment.block.html', 'punctuation.definition.comment.html']
expect(tokens[1]).toEqual value: '{{test}}', scopes: ['text.html.mustache', 'comment.block.html']
expect(tokens[2]).toEqual value: '-->', scopes: ['text.html.mustache', 'comment.block.html', 'punctuation.definition.comment.html']

View File

@ -0,0 +1,114 @@
// Specs for the Mustache grammar (scope `text.html.mustache`).
// Each spec tokenizes a single line and verifies token text and scope stacks.
describe('Mustache grammar', function () {
  let mustacheGrammar = null;

  beforeEach(function () {
    // Mustache layers on top of the HTML grammar, so both packages must be
    // active before the grammar lookup can succeed.
    waitsForPromise(() => atom.packages.activatePackage('language-html'));
    waitsForPromise(() => atom.packages.activatePackage('language-mustache'));
    runs(function () {
      mustacheGrammar = atom.grammars.grammarForScopeName('text.html.mustache');
    });
  });

  it('parses the grammar', function () {
    expect(mustacheGrammar).toBeTruthy();
    expect(mustacheGrammar.scopeName).toBe('text.html.mustache');
  });

  it('parses expressions', function () {
    const tokens = mustacheGrammar.tokenizeLine("{{name}}").tokens;
    expect(tokens[0]).toEqual({value: '{{', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']});
    expect(tokens[1]).toEqual({value: 'name', scopes: ['text.html.mustache', 'meta.tag.template.mustache']});
    expect(tokens[2]).toEqual({value: '}}', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']});
  });

  it('parses expressions in HTML attributes', function () {
    const tokens = mustacheGrammar.tokenizeLine("<a href='{{test}}'></a>").tokens;
    expect(tokens[6]).toEqual({value: '{{', scopes: ['text.html.mustache', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.single.html', 'meta.tag.template.mustache', 'entity.name.tag.mustache']});
    expect(tokens[8]).toEqual({value: '}}', scopes: ['text.html.mustache', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.single.html', 'meta.tag.template.mustache', 'entity.name.tag.mustache']});
    expect(tokens[9]).toEqual({value: "'", scopes: ['text.html.mustache', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.single.html', 'punctuation.definition.string.end.html']});
  });

  it('parses block comments', function () {
    const tokens = mustacheGrammar.tokenizeLine("{{!--{{comment}}--}}").tokens;
    expect(tokens[0]).toEqual({value: '{{!--', scopes: ['text.html.mustache', 'comment.block.mustache', 'punctuation.definition.comment.mustache']});
    expect(tokens[1]).toEqual({value: '{{comment}}', scopes: ['text.html.mustache', 'comment.block.mustache']});
    expect(tokens[2]).toEqual({value: '--}}', scopes: ['text.html.mustache', 'comment.block.mustache', 'punctuation.definition.comment.mustache']});
  });

  it('parses comments', function () {
    const tokens = mustacheGrammar.tokenizeLine("{{!comment}}").tokens;
    expect(tokens[0]).toEqual({value: '{{!', scopes: ['text.html.mustache', 'comment.block.mustache', 'punctuation.definition.comment.mustache']});
    expect(tokens[1]).toEqual({value: 'comment', scopes: ['text.html.mustache', 'comment.block.mustache']});
    expect(tokens[2]).toEqual({value: '}}', scopes: ['text.html.mustache', 'comment.block.mustache', 'punctuation.definition.comment.mustache']});
  });

  it('parses block expression', function () {
    // Four block-tag flavors: open (#), dotted open, inverted (^), close (/).
    let tokens = mustacheGrammar.tokenizeLine("{{#each people}}").tokens;
    expect(tokens[0]).toEqual({value: '{{', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']});
    expect(tokens[1]).toEqual({value: '#', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache', 'punctuation.definition.block.begin.mustache']});
    expect(tokens[2]).toEqual({value: 'each', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache', 'entity.name.function.mustache']});
    expect(tokens[3]).toEqual({value: ' people', scopes: ['text.html.mustache', 'meta.tag.template.mustache']});
    expect(tokens[4]).toEqual({value: '}}', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']});

    tokens = mustacheGrammar.tokenizeLine("{{# nested.block }}").tokens;
    expect(tokens[0]).toEqual({value: '{{', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']});
    expect(tokens[1]).toEqual({value: '#', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache', 'punctuation.definition.block.begin.mustache']});
    expect(tokens[3]).toEqual({value: 'nested.block', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache', 'entity.name.function.mustache']});
    expect(tokens[5]).toEqual({value: '}}', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']});

    tokens = mustacheGrammar.tokenizeLine("{{^repo}}").tokens;
    expect(tokens[0]).toEqual({value: '{{', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']});
    expect(tokens[1]).toEqual({value: '^', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache', 'punctuation.definition.block.begin.mustache']});
    expect(tokens[2]).toEqual({value: 'repo', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache', 'entity.name.function.mustache']});
    expect(tokens[3]).toEqual({value: '}}', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']});

    tokens = mustacheGrammar.tokenizeLine("{{/if}}").tokens;
    expect(tokens[0]).toEqual({value: '{{', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']});
    expect(tokens[1]).toEqual({value: '/', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache', 'punctuation.definition.block.end.mustache']});
    expect(tokens[2]).toEqual({value: 'if', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache', 'entity.name.function.mustache']});
    expect(tokens[3]).toEqual({value: '}}', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']});
  });

  it('parses unescaped expressions', function () {
    const tokens = mustacheGrammar.tokenizeLine("{{{do not escape me}}}").tokens;
    expect(tokens[0]).toEqual({value: '{{{', scopes: ['text.html.mustache', 'meta.tag.template.raw.mustache', 'entity.name.tag.mustache']});
    expect(tokens[1]).toEqual({value: 'do not escape me', scopes: ['text.html.mustache', 'meta.tag.template.raw.mustache']});
    expect(tokens[2]).toEqual({value: '}}}', scopes: ['text.html.mustache', 'meta.tag.template.raw.mustache', 'entity.name.tag.mustache']});
  });

  it('does not tokenize tags within tags', function () {
    // The inner "{{test" stays plain text; only the outermost pair is a tag.
    const tokens = mustacheGrammar.tokenizeLine("{{test{{test}}}}").tokens;
    expect(tokens[0]).toEqual({value: '{{', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']});
    expect(tokens[1]).toEqual({value: 'test{{test', scopes: ['text.html.mustache', 'meta.tag.template.mustache']});
    expect(tokens[2]).toEqual({value: '}}', scopes: ['text.html.mustache', 'meta.tag.template.mustache', 'entity.name.tag.mustache']});
    expect(tokens[3]).toEqual({value: '}}', scopes: ['text.html.mustache']});
  });

  it('does not tokenize comments within comments', function () {
    const tokens = mustacheGrammar.tokenizeLine("{{!test{{!test}}}}").tokens;
    expect(tokens[0]).toEqual({value: '{{!', scopes: ['text.html.mustache', 'comment.block.mustache', 'punctuation.definition.comment.mustache']});
    expect(tokens[1]).toEqual({value: 'test{{!test', scopes: ['text.html.mustache', 'comment.block.mustache']});
    expect(tokens[2]).toEqual({value: '}}', scopes: ['text.html.mustache', 'comment.block.mustache', 'punctuation.definition.comment.mustache']});
    expect(tokens[3]).toEqual({value: '}}', scopes: ['text.html.mustache']});
  });

  it('does not tokenize Mustache expressions inside HTML comments', function () {
    const tokens = mustacheGrammar.tokenizeLine("<!--{{test}}-->").tokens;
    expect(tokens[0]).toEqual({value: '<!--', scopes: ['text.html.mustache', 'comment.block.html', 'punctuation.definition.comment.html']});
    expect(tokens[1]).toEqual({value: '{{test}}', scopes: ['text.html.mustache', 'comment.block.html']});
    expect(tokens[2]).toEqual({value: '-->', scopes: ['text.html.mustache', 'comment.block.html', 'punctuation.definition.comment.html']});
  });
});

View File

@ -1,49 +0,0 @@
# Specs for the Objective-C and Objective-C++ grammars (CoffeeScript/Jasmine).
# Each spec tokenizes a snippet and checks token values and scope stacks.
describe 'Language-Objective-C', ->
  grammar = null

  beforeEach ->
    # language-objective-c builds on language-c; both packages must be active.
    waitsForPromise ->
      atom.packages.activatePackage('language-objective-c')
    waitsForPromise ->
      atom.packages.activatePackage('language-c')

  describe "Objective-C", ->
    beforeEach ->
      grammar = atom.grammars.grammarForScopeName('source.objc')

    it 'parses the grammar', ->
      expect(grammar).toBeTruthy()
      expect(grammar.scopeName).toBe 'source.objc'

    it 'tokenizes classes', ->
      # tokenizeLines returns one token array per input line; the block string
      # strips the common leading indentation, so content starts at column 0.
      lines = grammar.tokenizeLines '''
        @interface Thing
        @property (nonatomic, strong) NSArray *items;
        @end
      '''
      expect(lines[0][1]).toEqual value: 'interface', scopes: ["source.objc", "meta.interface-or-protocol.objc", "storage.type.objc"]
      expect(lines[0][3]).toEqual value: 'Thing', scopes: ["source.objc", "meta.interface-or-protocol.objc", "entity.name.type.objc"]

  describe "Objective-C++", ->
    beforeEach ->
      grammar = atom.grammars.grammarForScopeName('source.objcpp')

    it 'parses the grammar', ->
      expect(grammar).toBeTruthy()
      expect(grammar.scopeName).toBe 'source.objcpp'

    it 'tokenizes classes', ->
      # Mixed C++ and Objective-C source: both class forms must tokenize.
      lines = grammar.tokenizeLines '''
        class Thing1 {
        vector<int> items;
        };
        @interface Thing2
        @property (nonatomic, strong) NSArray *items;
        @end
      '''
      expect(lines[0][2].value).toBe 'Thing1'
      expect(lines[4][3]).toEqual value: 'Thing2', scopes: ["source.objcpp", "meta.interface-or-protocol.objc", "entity.name.type.objc"]

View File

@ -0,0 +1,56 @@
// Specs for the Objective-C and Objective-C++ grammars.
// Each spec tokenizes a snippet and checks token values and scope stacks.
describe('Language-Objective-C', function () {
  let activeGrammar = null;

  beforeEach(function () {
    // language-objective-c builds on language-c; both packages must be active.
    waitsForPromise(() => atom.packages.activatePackage('language-objective-c'));
    waitsForPromise(() => atom.packages.activatePackage('language-c'));
  });

  describe("Objective-C", function () {
    beforeEach(function () {
      activeGrammar = atom.grammars.grammarForScopeName('source.objc');
    });

    it('parses the grammar', function () {
      expect(activeGrammar).toBeTruthy();
      expect(activeGrammar.scopeName).toBe('source.objc');
    });

    it('tokenizes classes', function () {
      // tokenizeLines returns one token array per input line.
      const lines = activeGrammar.tokenizeLines(`\
@interface Thing
@property (nonatomic, strong) NSArray *items;
@end\
`);
      expect(lines[0][1]).toEqual({value: 'interface', scopes: ["source.objc", "meta.interface-or-protocol.objc", "storage.type.objc"]});
      expect(lines[0][3]).toEqual({value: 'Thing', scopes: ["source.objc", "meta.interface-or-protocol.objc", "entity.name.type.objc"]});
    });
  });

  describe("Objective-C++", function () {
    beforeEach(function () {
      activeGrammar = atom.grammars.grammarForScopeName('source.objcpp');
    });

    it('parses the grammar', function () {
      expect(activeGrammar).toBeTruthy();
      return expect(activeGrammar.scopeName).toBe('source.objcpp');
    });

    it('tokenizes classes', function () {
      // Mixed C++ and Objective-C source: both class forms must tokenize.
      const lines = activeGrammar.tokenizeLines(`\
class Thing1 {
vector<int> items;
};
@interface Thing2
@property (nonatomic, strong) NSArray *items;
@end\
`);
      expect(lines[0][2].value).toBe('Thing1');
      expect(lines[4][3]).toEqual({value: 'Thing2', scopes: ["source.objcpp", "meta.interface-or-protocol.objc", "entity.name.type.objc"]});
    });
  });
});

View File

@ -1,293 +0,0 @@
# Specs for the Perl 6 grammar (scope `source.perl6`) from language-perl.
# Covers identifier/string/module/comment tokenization plus firstLineRegex
# detection of shebangs, pragmas, and Emacs/Vim modelines.
describe "Perl 6 grammar", ->
  grammar = null

  beforeEach ->
    waitsForPromise ->
      atom.packages.activatePackage("language-perl")
    runs ->
      grammar = atom.grammars.grammarForScopeName("source.perl6")

  it "parses the grammar", ->
    expect(grammar).toBeDefined()
    expect(grammar.scopeName).toBe "source.perl6"

  describe "identifiers", ->
    it "should match simple scalar identifiers", ->
      {tokens} = grammar.tokenizeLine('$a')
      expect(tokens[0]).toEqual value: '$a', scopes: [
        'source.perl6'
        'variable.other.identifier.perl6'
      ]

    it "should match simple array identifiers", ->
      {tokens} = grammar.tokenizeLine('@a')
      expect(tokens[0]).toEqual value: '@a', scopes: [
        'source.perl6'
        'variable.other.identifier.perl6'
      ]

    it "should match simple hash identifiers", ->
      {tokens} = grammar.tokenizeLine('%a')
      expect(tokens[0]).toEqual value: '%a', scopes: [
        'source.perl6'
        'variable.other.identifier.perl6'
      ]

    # NOTE(review): this description duplicates the previous spec's, although
    # it exercises the & sigil (callable), not the hash sigil.
    it "should match simple hash identifiers", ->
      {tokens} = grammar.tokenizeLine('&a')
      expect(tokens[0]).toEqual value: '&a', scopes: [
        'source.perl6'
        'variable.other.identifier.perl6'
      ]

    it "should match unicode identifiers", ->
      {tokens} = grammar.tokenizeLine('$cööl-páttérn')
      expect(tokens[0]).toEqual value: '$cööl-páttérn', scopes: [
        'source.perl6'
        'variable.other.identifier.perl6'
      ]

    it "should match identifiers with multiple dashes which can contain other keywords", ->
      {tokens} = grammar.tokenizeLine('start-from-here')
      expect(tokens.length).toEqual 1
      expect(tokens[0]).toEqual value: 'start-from-here', scopes: [
        'source.perl6'
        'routine.name.perl6'
      ]

    it "should match identifiers with dash which can contain other keywords", ->
      {tokens} = grammar.tokenizeLine('start-here')
      expect(tokens.length).toEqual 1
      expect(tokens[0]).toEqual value: 'start-here', scopes: [
        'source.perl6'
        'routine.name.perl6'
      ]

    it "should match identifiers with dash which can contain other keywords", ->
      {tokens} = grammar.tokenizeLine('is-required')
      expect(tokens.length).toEqual 1
      expect(tokens[0]).toEqual value: 'is-required', scopes: [
        'source.perl6'
        'routine.name.perl6'
      ]

    it "should match identifiers with dash which can contain other keywords", ->
      {tokens} = grammar.tokenizeLine('is-utf8')
      expect(tokens.length).toEqual 1
      expect(tokens[0]).toEqual value: 'is-utf8', scopes: [
        'source.perl6'
        'routine.name.perl6'
      ]

    it "should match identifiers with a dangling match", ->
      # A trailing dash is not part of the identifier; it tokenizes separately.
      {tokens} = grammar.tokenizeLine('is-')
      expect(tokens.length).toEqual 2
      expect(tokens[0]).toEqual value: 'is', scopes: [
        'source.perl6'
        'routine.name.perl6'
      ]
      expect(tokens[1]).toEqual value: '-', scopes: [
        'source.perl6'
      ]

    it "should not match scalar identifiers with a dash followed by a number", ->
      {tokens} = grammar.tokenizeLine('$foo-1')
      expect(tokens.length).toEqual 2
      expect(tokens[0]).toEqual value: '$foo', scopes: [
        'source.perl6'
        'variable.other.identifier.perl6'
      ]
      expect(tokens[1]).toEqual value: '-1', scopes: [
        'source.perl6'
      ]

  describe "strings", ->
    it "should tokenize simple strings", ->
      {tokens} = grammar.tokenizeLine('"abc"')
      expect(tokens.length).toEqual 3
      expect(tokens[0]).toEqual value: '"', scopes: [
        'source.perl6'
        'string.quoted.double.perl6'
        'punctuation.definition.string.begin.perl6'
      ]
      expect(tokens[1]).toEqual value: 'abc', scopes: [
        'source.perl6'
        'string.quoted.double.perl6'
      ]
      expect(tokens[2]).toEqual value: '"', scopes: [
        'source.perl6'
        'string.quoted.double.perl6'
        'punctuation.definition.string.end.perl6'
      ]

  describe "modules", ->
    it "should parse package declarations", ->
      # Package names may contain quotes, dashes, unicode, and :: separators.
      {tokens} = grammar.tokenizeLine("class Johnny's::Super-Cool::cööl-páttérn::Module")
      expect(tokens.length).toEqual 3
      expect(tokens[0]).toEqual value: 'class', scopes: [
        'source.perl6'
        'meta.class.perl6'
        'storage.type.class.perl6'
      ]
      expect(tokens[1]).toEqual
        value: ' '
        scopes: [
          'source.perl6'
          'meta.class.perl6'
        ]
      expect(tokens[2]).toEqual
        value: 'Johnny\'s::Super-Cool::cööl-páttérn::Module'
        scopes: [
          'source.perl6'
          'meta.class.perl6'
          'entity.name.type.class.perl6'
        ]

  describe "comments", ->
    it "should parse comments", ->
      {tokens} = grammar.tokenizeLine("# this is the comment")
      expect(tokens.length).toEqual 3
      expect(tokens[0]).toEqual
        value: '#'
        scopes: [
          'source.perl6'
          'comment.line.number-sign.perl6'
          'punctuation.definition.comment.perl6'
        ]
      expect(tokens[1]).toEqual
        value: ' this is the comment',
        scopes: [
          'source.perl6'
          'comment.line.number-sign.perl6'
        ]

  describe "firstLineMatch", ->
    # These specs feed candidate first lines to the grammar's firstLineRegex;
    # heredoc contents are dedented, so each candidate line starts at column 0
    # unless it deliberately uses \x20 or \t to test leading whitespace.
    it "recognises interpreter directives", ->
      valid = """
        #!perl6 -w
        #! perl6 -w
        #!/usr/sbin/perl6 foo
        #!/usr/bin/perl6 foo=bar/
        #!/usr/sbin/perl6
        #!/usr/sbin/perl6 foo bar baz
        #!/usr/bin/env perl6
        #!/usr/bin/env bin/perl6
        #!/usr/bin/perl6
        #!/bin/perl6
        #!/usr/bin/perl6 --script=usr/bin
        #! /usr/bin/env A=003 B=149 C=150 D=xzd E=base64 F=tar G=gz H=head I=tail perl6
        #!\t/usr/bin/env --foo=bar perl6 --quu=quux
        #! /usr/bin/perl6
        #!/usr/bin/env perl6
      """
      for line in valid.split /\n/
        expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull()

      invalid = """
        #! pearl6
        #!/bin/perl 6
        perl6
        #perl6
        \x20#!/usr/sbin/perl6
        \t#!/usr/sbin/perl6
        #!
        #!\x20
        #!/usr/bin/env
        #!/usr/bin/env-perl6
        #! /usr/binperl6
        #!\t/usr/bin/env --perl6=bar
      """
      for line in invalid.split /\n/
        expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull()

    it "recognises the Perl6 pragma", ->
      line = "use v6;"
      expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull()

    it "recognises Emacs modelines", ->
      modelines = """
        #-*-perl6-*-
        #-*-mode:perl6-*-
        /* -*-perl6-*- */
        // -*- PERL6 -*-
        /* -*- mode:perl6 -*- */
        // -*- font:bar;mode:Perl6 -*-
        // -*- font:bar;mode:Perl6;foo:bar; -*-
        // -*-font:mode;mode:perl6-*-
        " -*-foo:bar;mode:Perl6;bar:foo-*- ";
        " -*-font-mode:foo;mode:Perl6;foo-bar:quux-*-"
        "-*-font:x;foo:bar; mode : pErL6;bar:foo;foooooo:baaaaar;fo:ba;-*-";
        "-*- font:x;foo : bar ; mode : pErL6 ; bar : foo ; foooooo:baaaaar;fo:ba-*-";
      """
      for line in modelines.split /\n/
        expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull()

      invalid = """
        /* --*perl6-*- */
        /* -*-- perl6 -*-
        /* -*- -- perl6 -*-
        /* -*- perl6 -;- -*-
        // -*- iPERL6 -*-
        // -*- perl 6 -*-
        // -*- perl6-stuff -*-
        /* -*- model:perl6 -*-
        /* -*- indent-mode:perl6 -*-
        // -*- font:mode;Perl6 -*-
        // -*- mode: -*- Perl6
        // -*- mode: grok-with-perl6 -*-
        // -*-font:mode;mode:perl6--*-
      """
      for line in invalid.split /\n/
        expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull()

    it "recognises Vim modelines", ->
      valid = """
        vim: se filetype=perl6:
        # vim: se ft=perl6:
        # vim: set ft=perl6:
        # vim: set filetype=Perl6:
        # vim: ft=perl6
        # vim: syntax=pERl6
        # vim: se syntax=PERL6:
        # ex: syntax=perl6
        # vim:ft=perl6
        # vim600: ft=perl6
        # vim>600: set ft=perl6:
        # vi:noai:sw=3 ts=6 ft=perl6
        # vi::::::::::noai:::::::::::: ft=perl6
        # vim:ts=4:sts=4:sw=4:noexpandtab:ft=perl6
        # vi:: noai : : : : sw =3 ts =6 ft =perl6
        # vim: ts=4: pi sts=4: ft=perl6: noexpandtab: sw=4:
        # vim: ts=4 sts=4: ft=perl6 noexpandtab:
        # vim:noexpandtab sts=4 ft=perl6 ts=4
        # vim:noexpandtab:ft=perl6
        # vim:ts=4:sts=4 ft=perl6:noexpandtab:\x20
        # vim:noexpandtab titlestring=hi\|there\\\\ ft=perl6 ts=4
      """
      for line in valid.split /\n/
        expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull()

      invalid = """
        ex: se filetype=perl6:
        _vi: se filetype=perl6:
        vi: se filetype=perl6
        # vim set ft=perl6o
        # vim: soft=perl6
        # vim: hairy-syntax=perl6:
        # vim set ft=perl6:
        # vim: setft=perl6:
        # vim: se ft=perl6 backupdir=tmp
        # vim: set ft=perl6 set cmdheight=1
        # vim:noexpandtab sts:4 ft:perl6 ts:4
        # vim:noexpandtab titlestring=hi\\|there\\ ft=perl6 ts=4
        # vim:noexpandtab titlestring=hi\\|there\\\\\\ ft=perl6 ts=4
      """
      for line in invalid.split /\n/
        expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull()
# Local variables:
# mode: CoffeeScript
# End:

View File

@ -0,0 +1,333 @@
// Specs for the Perl 6 grammar (scope `source.perl6`) from language-perl.
// Covers identifier/string/module/comment tokenization plus firstLineRegex
// detection of shebangs, pragmas, and Emacs/Vim modelines. The code appears
// machine-converted from CoffeeScript (hence the Array.from wrappers and the
// result-collecting IIFEs at the end of some specs).
describe("Perl 6 grammar", () => {
  let grammar = null;

  beforeEach(() => {
    waitsForPromise(() => atom.packages.activatePackage("language-perl"));
    runs(() => grammar = atom.grammars.grammarForScopeName("source.perl6"));
  });

  it("parses the grammar", () => {
    expect(grammar).toBeDefined();
    expect(grammar.scopeName).toBe("source.perl6");
  });

  describe("identifiers", () => {
    it("should match simple scalar identifiers", () => {
      const {tokens} = grammar.tokenizeLine('$a');
      expect(tokens[0]).toEqual({value: '$a', scopes: [
        'source.perl6',
        'variable.other.identifier.perl6'
      ]});
    });

    it("should match simple array identifiers", () => {
      const {tokens} = grammar.tokenizeLine('@a');
      expect(tokens[0]).toEqual({value: '@a', scopes: [
        'source.perl6',
        'variable.other.identifier.perl6'
      ]});
    });

    it("should match simple hash identifiers", () => {
      const {tokens} = grammar.tokenizeLine('%a');
      expect(tokens[0]).toEqual({value: '%a', scopes: [
        'source.perl6',
        'variable.other.identifier.perl6'
      ]});
    });

    // NOTE(review): this description duplicates the previous spec's, although
    // it exercises the & sigil (callable), not the hash sigil.
    it("should match simple hash identifiers", () => {
      const {tokens} = grammar.tokenizeLine('&a');
      expect(tokens[0]).toEqual({value: '&a', scopes: [
        'source.perl6',
        'variable.other.identifier.perl6'
      ]});
    });

    it("should match unicode identifiers", () => {
      const {tokens} = grammar.tokenizeLine('$cööl-páttérn');
      expect(tokens[0]).toEqual({value: '$cööl-páttérn', scopes: [
        'source.perl6',
        'variable.other.identifier.perl6'
      ]});
    });

    it("should match identifiers with multiple dashes which can contain other keywords", () => {
      const {tokens} = grammar.tokenizeLine('start-from-here');
      expect(tokens.length).toEqual(1);
      expect(tokens[0]).toEqual({value: 'start-from-here', scopes: [
        'source.perl6',
        'routine.name.perl6'
      ]});
    });

    it("should match identifiers with dash which can contain other keywords", () => {
      const {tokens} = grammar.tokenizeLine('start-here');
      expect(tokens.length).toEqual(1);
      expect(tokens[0]).toEqual({value: 'start-here', scopes: [
        'source.perl6',
        'routine.name.perl6'
      ]});
    });

    it("should match identifiers with dash which can contain other keywords", () => {
      const {tokens} = grammar.tokenizeLine('is-required');
      expect(tokens.length).toEqual(1);
      expect(tokens[0]).toEqual({value: 'is-required', scopes: [
        'source.perl6',
        'routine.name.perl6'
      ]});
    });

    it("should match identifiers with dash which can contain other keywords", () => {
      const {tokens} = grammar.tokenizeLine('is-utf8');
      expect(tokens.length).toEqual(1);
      expect(tokens[0]).toEqual({value: 'is-utf8', scopes: [
        'source.perl6',
        'routine.name.perl6'
      ]});
    });

    it("should match identifiers with a dangling match", () => {
      // A trailing dash is not part of the identifier; it tokenizes separately.
      const {tokens} = grammar.tokenizeLine('is-');
      expect(tokens.length).toEqual(2);
      expect(tokens[0]).toEqual({value: 'is', scopes: [
        'source.perl6',
        'routine.name.perl6'
      ]});
      expect(tokens[1]).toEqual({value: '-', scopes: [
        'source.perl6'
      ]});
    });

    it("should not match scalar identifiers with a dash followed by a number", () => {
      const {tokens} = grammar.tokenizeLine('$foo-1');
      expect(tokens.length).toEqual(2);
      expect(tokens[0]).toEqual({value: '$foo', scopes: [
        'source.perl6',
        'variable.other.identifier.perl6'
      ]});
      expect(tokens[1]).toEqual({value: '-1', scopes: [
        'source.perl6'
      ]});
    });
  });

  describe("strings", () => it("should tokenize simple strings", () => {
    const {tokens} = grammar.tokenizeLine('"abc"');
    expect(tokens.length).toEqual(3);
    expect(tokens[0]).toEqual({value: '"', scopes: [
      'source.perl6',
      'string.quoted.double.perl6',
      'punctuation.definition.string.begin.perl6'
    ]});
    expect(tokens[1]).toEqual({value: 'abc', scopes: [
      'source.perl6',
      'string.quoted.double.perl6'
    ]});
    expect(tokens[2]).toEqual({value: '"', scopes: [
      'source.perl6',
      'string.quoted.double.perl6',
      'punctuation.definition.string.end.perl6'
    ]});
  }));

  // Package names may contain quotes, dashes, unicode, and :: separators.
  describe("modules", () => it("should parse package declarations", () => {
    const {tokens} = grammar.tokenizeLine("class Johnny's::Super-Cool::cööl-páttérn::Module");
    expect(tokens.length).toEqual(3);
    expect(tokens[0]).toEqual({value: 'class', scopes: [
      'source.perl6',
      'meta.class.perl6',
      'storage.type.class.perl6'
    ]});
    expect(tokens[1]).toEqual({
      value: ' ',
      scopes: [
        'source.perl6',
        'meta.class.perl6'
      ]});
    expect(tokens[2]).toEqual({
      value: 'Johnny\'s::Super-Cool::cööl-páttérn::Module',
      scopes: [
        'source.perl6',
        'meta.class.perl6',
        'entity.name.type.class.perl6'
      ]});
  }));

  describe("comments", () => it("should parse comments", () => {
    const {tokens} = grammar.tokenizeLine("# this is the comment");
    expect(tokens.length).toEqual(3);
    expect(tokens[0]).toEqual({
      value: '#',
      scopes: [
        'source.perl6',
        'comment.line.number-sign.perl6',
        'punctuation.definition.comment.perl6'
      ]});
    expect(tokens[1]).toEqual({
      value: ' this is the comment',
      scopes: [
        'source.perl6',
        'comment.line.number-sign.perl6'
      ]});
  }));

  describe("firstLineMatch", () => {
    // These specs feed candidate first lines to the grammar's firstLineRegex.
    // Template-literal content must stay at column 0: any added indentation
    // would become part of the tested lines (leading whitespace is exercised
    // deliberately via \x20 and \t escapes instead).
    it("recognises interpreter directives", () => {
      let line;
      const valid = `\
#!perl6 -w
#! perl6 -w
#!/usr/sbin/perl6 foo
#!/usr/bin/perl6 foo=bar/
#!/usr/sbin/perl6
#!/usr/sbin/perl6 foo bar baz
#!/usr/bin/env perl6
#!/usr/bin/env bin/perl6
#!/usr/bin/perl6
#!/bin/perl6
#!/usr/bin/perl6 --script=usr/bin
#! /usr/bin/env A=003 B=149 C=150 D=xzd E=base64 F=tar G=gz H=head I=tail perl6
#!\t/usr/bin/env --foo=bar perl6 --quu=quux
#! /usr/bin/perl6
#!/usr/bin/env perl6\
`;
      for (line of Array.from(valid.split(/\n/))) {
        expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull();
      }
      const invalid = `\
#! pearl6
#!/bin/perl 6
perl6
#perl6
\x20#!/usr/sbin/perl6
\t#!/usr/sbin/perl6
#!
#!\x20
#!/usr/bin/env
#!/usr/bin/env-perl6
#! /usr/binperl6
#!\t/usr/bin/env --perl6=bar\
`;
      // Converted-from-CoffeeScript IIFE: collects the assertion results as
      // the spec callback's return value (Jasmine ignores it).
      return (() => {
        const result = [];
        for (line of Array.from(invalid.split(/\n/))) {
          result.push(expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull());
        }
        return result;
      })();
    });

    it("recognises the Perl6 pragma", () => {
      const line = "use v6;";
      expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull();
    });

    it("recognises Emacs modelines", () => {
      let line;
      const modelines = `\
#-*-perl6-*-
#-*-mode:perl6-*-
/* -*-perl6-*- */
// -*- PERL6 -*-
/* -*- mode:perl6 -*- */
// -*- font:bar;mode:Perl6 -*-
// -*- font:bar;mode:Perl6;foo:bar; -*-
// -*-font:mode;mode:perl6-*-
" -*-foo:bar;mode:Perl6;bar:foo-*- ";
" -*-font-mode:foo;mode:Perl6;foo-bar:quux-*-"
"-*-font:x;foo:bar; mode : pErL6;bar:foo;foooooo:baaaaar;fo:ba;-*-";
"-*- font:x;foo : bar ; mode : pErL6 ; bar : foo ; foooooo:baaaaar;fo:ba-*-";\
`;
      for (line of Array.from(modelines.split(/\n/))) {
        expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull();
      }
      const invalid = `\
/* --*perl6-*- */
/* -*-- perl6 -*-
/* -*- -- perl6 -*-
/* -*- perl6 -;- -*-
// -*- iPERL6 -*-
// -*- perl 6 -*-
// -*- perl6-stuff -*-
/* -*- model:perl6 -*-
/* -*- indent-mode:perl6 -*-
// -*- font:mode;Perl6 -*-
// -*- mode: -*- Perl6
// -*- mode: grok-with-perl6 -*-
// -*-font:mode;mode:perl6--*-\
`;
      return (() => {
        const result = [];
        for (line of Array.from(invalid.split(/\n/))) {
          result.push(expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull());
        }
        return result;
      })();
    });

    it("recognises Vim modelines", () => {
      let line;
      const valid = `\
vim: se filetype=perl6:
# vim: se ft=perl6:
# vim: set ft=perl6:
# vim: set filetype=Perl6:
# vim: ft=perl6
# vim: syntax=pERl6
# vim: se syntax=PERL6:
# ex: syntax=perl6
# vim:ft=perl6
# vim600: ft=perl6
# vim>600: set ft=perl6:
# vi:noai:sw=3 ts=6 ft=perl6
# vi::::::::::noai:::::::::::: ft=perl6
# vim:ts=4:sts=4:sw=4:noexpandtab:ft=perl6
# vi:: noai : : : : sw =3 ts =6 ft =perl6
# vim: ts=4: pi sts=4: ft=perl6: noexpandtab: sw=4:
# vim: ts=4 sts=4: ft=perl6 noexpandtab:
# vim:noexpandtab sts=4 ft=perl6 ts=4
# vim:noexpandtab:ft=perl6
# vim:ts=4:sts=4 ft=perl6:noexpandtab:\x20
# vim:noexpandtab titlestring=hi\|there\\\\ ft=perl6 ts=4\
`;
      for (line of Array.from(valid.split(/\n/))) {
        expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull();
      }
      const invalid = `\
ex: se filetype=perl6:
_vi: se filetype=perl6:
vi: se filetype=perl6
# vim set ft=perl6o
# vim: soft=perl6
# vim: hairy-syntax=perl6:
# vim set ft=perl6:
# vim: setft=perl6:
# vim: se ft=perl6 backupdir=tmp
# vim: set ft=perl6 set cmdheight=1
# vim:noexpandtab sts:4 ft:perl6 ts:4
# vim:noexpandtab titlestring=hi\\|there\\ ft=perl6 ts=4
# vim:noexpandtab titlestring=hi\\|there\\\\\\ ft=perl6 ts=4\
`;
      return (() => {
        const result = [];
        for (line of Array.from(invalid.split(/\n/))) {
          result.push(expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull());
        }
        return result;
      })();
    });
  });
});
// Local variables:
// mode: CoffeeScript
// End:

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,296 +0,0 @@
describe 'PHP in HTML', ->
grammar = null
beforeEach ->
waitsForPromise ->
atom.packages.activatePackage 'language-php'
waitsForPromise ->
# While not used explicitly in any tests, we still activate language-html
# to mirror how language-php behaves outside of specs
atom.packages.activatePackage 'language-html'
runs ->
grammar = atom.grammars.grammarForScopeName 'text.html.php'
it 'parses the grammar', ->
expect(grammar).toBeTruthy()
expect(grammar.scopeName).toBe 'text.html.php'
describe 'PHP tags', ->
it 'tokenizes starting and closing PHP tags on the same line', ->
startTags = ['<?php', '<?=', '<?']
for startTag in startTags
tokens = grammar.tokenizeLines "#{startTag} /* stuff */ ?>"
expect(tokens[0][0]).toEqual value: startTag, scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.begin.php']
expect(tokens[0][1]).toEqual value: ' ', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php']
expect(tokens[0][2]).toEqual value: '/*', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']
expect(tokens[0][4]).toEqual value: '*/', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']
expect(tokens[0][5]).toEqual value: ' ', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php']
expect(tokens[0][6]).toEqual value: '?', scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.end.php', 'source.php']
expect(tokens[0][7]).toEqual value: '>', scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.end.php']
it 'tokenizes starting and closing PHP tags on different lines', ->
startTags = ['<?php', '<?=', '<?']
for startTag in startTags
tokens = grammar.tokenizeLines "#{startTag}\n/* stuff */ ?>"
expect(tokens[0][0]).toEqual value: startTag, scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.begin.php']
expect(tokens[1][0]).toEqual value: '/*', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']
expect(tokens[1][2]).toEqual value: '*/', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']
expect(tokens[1][3]).toEqual value: ' ', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php']
expect(tokens[1][4]).toEqual value: '?', scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.end.php', 'source.php']
expect(tokens[1][5]).toEqual value: '>', scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.end.php']
tokens = grammar.tokenizeLines "#{startTag} /* stuff */\n?>"
expect(tokens[0][0]).toEqual value: startTag, scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.begin.php']
expect(tokens[0][1]).toEqual value: ' ', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php']
expect(tokens[0][2]).toEqual value: '/*', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']
expect(tokens[0][4]).toEqual value: '*/', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']
expect(tokens[1][0]).toEqual value: '?', scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.end.php', 'source.php']
expect(tokens[1][1]).toEqual value: '>', scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.end.php']
tokens = grammar.tokenizeLines "#{startTag}\n/* stuff */\n?>"
expect(tokens[0][0]).toEqual value: startTag, scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.begin.php']
expect(tokens[1][0]).toEqual value: '/*', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']
expect(tokens[1][2]).toEqual value: '*/', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']
expect(tokens[2][0]).toEqual value: '?', scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.end.php', 'source.php']
expect(tokens[2][1]).toEqual value: '>', scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.end.php']
it 'tokenizes `include` on the same line as <?php', ->
# https://github.com/atom/language-php/issues/154
{tokens} = grammar.tokenizeLine "<?php include 'test'?>"
expect(tokens[2]).toEqual value: 'include', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'meta.include.php', 'keyword.control.import.include.php']
expect(tokens[4]).toEqual value: "'", scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'meta.include.php', 'string.quoted.single.php', 'punctuation.definition.string.begin.php']
expect(tokens[6]).toEqual value: "'", scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'meta.include.php', 'string.quoted.single.php', 'punctuation.definition.string.end.php']
expect(tokens[7]).toEqual value: '?', scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.end.php', 'source.php']
expect(tokens[8]).toEqual value: '>', scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.end.php']
it 'tokenizes namespaces immediately following <?php', ->
{tokens} = grammar.tokenizeLine '<?php namespace Test;'
expect(tokens[1]).toEqual value: ' ', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'meta.namespace.php']
expect(tokens[2]).toEqual value: 'namespace', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'meta.namespace.php', 'keyword.other.namespace.php']
expect(tokens[3]).toEqual value: ' ', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'meta.namespace.php']
expect(tokens[4]).toEqual value: 'Test', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'meta.namespace.php', 'entity.name.type.namespace.php']
expect(tokens[5]).toEqual value: ';', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'punctuation.terminator.expression.php']
it 'does not tokenize PHP tag syntax within PHP syntax itself inside HTML script tags (regression)', ->
lines = grammar.tokenizeLines '''
<script>
<?php
/*
?>
<?php
*/
?>
</script>
'''
expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.php', 'meta.tag.script.html', 'punctuation.definition.tag.html']
expect(lines[0][1]).toEqual value: 'script', scopes: ['text.html.php', 'meta.tag.script.html', 'entity.name.tag.script.html']
expect(lines[0][2]).toEqual value: '>', scopes: ['text.html.php', 'meta.tag.script.html', 'punctuation.definition.tag.html']
expect(lines[1][0]).toEqual value: '<?php', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'punctuation.section.embedded.begin.php']
expect(lines[2][0]).toEqual value: ' ', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'source.php']
expect(lines[2][1]).toEqual value: '/*', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']
expect(lines[3][0]).toEqual value: ' ?>', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'source.php', 'comment.block.php']
expect(lines[4][0]).toEqual value: ' <?php', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'source.php', 'comment.block.php']
expect(lines[5][0]).toEqual value: ' ', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'source.php', 'comment.block.php']
expect(lines[5][1]).toEqual value: '*/', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']
expect(lines[6][0]).toEqual value: '?', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'punctuation.section.embedded.end.php', 'source.php']
expect(lines[6][1]).toEqual value: '>', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'punctuation.section.embedded.end.php']
expect(lines[7][0]).toEqual value: '</', scopes: ['text.html.php', 'meta.tag.script.html', 'punctuation.definition.tag.html']
expect(lines[7][1]).toEqual value: 'script', scopes: ['text.html.php', 'meta.tag.script.html', 'entity.name.tag.script.html']
expect(lines[7][2]).toEqual value: '>', scopes: ['text.html.php', 'meta.tag.script.html', 'punctuation.definition.tag.html']
it 'does not tokenize PHP tag syntax within PHP syntax itself inside HTML attributes (regression)', ->
{tokens} = grammar.tokenizeLine '<img src="<?php /* ?> <?php */ ?>" />'
expect(tokens[0]).toEqual value: '<', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'punctuation.definition.tag.begin.html']
expect(tokens[1]).toEqual value: 'img', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'entity.name.tag.inline.img.html']
expect(tokens[2]).toEqual value: ' ', scopes: ['text.html.php', 'meta.tag.inline.img.html']
expect(tokens[3]).toEqual value: 'src', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'entity.other.attribute-name.html']
expect(tokens[4]).toEqual value: '=', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'punctuation.separator.key-value.html']
expect(tokens[5]).toEqual value: '"', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']
expect(tokens[6]).toEqual value: '<?php', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'meta.embedded.line.php', 'punctuation.section.embedded.begin.php']
expect(tokens[7]).toEqual value: ' ', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'meta.embedded.line.php', 'source.php']
expect(tokens[8]).toEqual value: '/*', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'meta.embedded.line.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']
expect(tokens[9]).toEqual value: ' ?> <?php ', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'meta.embedded.line.php', 'source.php', 'comment.block.php']
expect(tokens[10]).toEqual value: '*/', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'meta.embedded.line.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']
expect(tokens[11]).toEqual value: ' ', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'meta.embedded.line.php', 'source.php']
expect(tokens[12]).toEqual value: '?', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'meta.embedded.line.php', 'punctuation.section.embedded.end.php', 'source.php']
expect(tokens[13]).toEqual value: '>', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'meta.embedded.line.php', 'punctuation.section.embedded.end.php']
expect(tokens[14]).toEqual value: '"', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']
expect(tokens[15]).toEqual value: ' />', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'punctuation.definition.tag.end.html']
describe 'shebang', ->
it 'recognises shebang on the first line of document', ->
lines = grammar.tokenizeLines '''
#!/usr/bin/env php
<?php echo "test"; ?>
'''
expect(lines[0][0]).toEqual value: '#!', scopes: ['text.html.php', 'comment.line.shebang.php', 'punctuation.definition.comment.php']
expect(lines[0][1]).toEqual value: '/usr/bin/env php', scopes: ['text.html.php', 'comment.line.shebang.php']
expect(lines[1][0]).toEqual value: '<?php', scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.begin.php']
it 'does not recognize shebang on any of the other lines', ->
lines = grammar.tokenizeLines '''
#!/usr/bin/env php
<?php echo "test"; ?>
'''
expect(lines[1][0]).toEqual value: '#!/usr/bin/env php', scopes: ['text.html.php']
describe 'firstLineMatch', ->
it 'recognises opening PHP tags', ->
valid = '''
<?php
<?PHP
<?=
<?
<? echo "test";
<?="test"
<?php namespace foo;
'''
for line in valid.split /\n/
expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull()
# Do not allow matching XML declaration until the grammar scoring system takes into account
# the length of the first line match so that longer matches get the priority over shorter matches.
expect(grammar.firstLineRegex.findNextMatchSync('<?xml version="1.0" encoding="UTF-8"?>')).toBeNull()
it 'recognises interpreter directives', ->
valid = '''
#!/usr/bin/php
#!/usr/bin/php foo=bar/
#!/usr/sbin/php5
#!/usr/sbin/php7 foo bar baz
#!/usr/bin/php perl
#!/usr/bin/php4 bin/perl
#!/usr/bin/env php
#!/bin/php
#!/usr/bin/php --script=usr/bin
#! /usr/bin/env A=003 B=149 C=150 D=xzd E=base64 F=tar G=gz H=head I=tail php
#!\t/usr/bin/env --foo=bar php --quu=quux
#! /usr/bin/php
#!/usr/bin/env php
'''
for line in valid.split /\n/
expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull()
invalid = '''
\x20#!/usr/sbin/php
\t#!/usr/sbin/php
#!/usr/bin/env-php/node-env/
#!/usr/bin/env-php
#! /usr/binphp
#!/usr/bin.php
#!\t/usr/bin/env --php=bar
'''
for line in invalid.split /\n/
expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull()
it 'recognises Emacs modelines', ->
valid = '''
#-*- PHP -*-
#-*- mode: PHP -*-
/* -*-php-*- */
// -*- PHP -*-
/* -*- mode:PHP -*- */
// -*- font:bar;mode:pHp -*-
// -*- font:bar;mode:PHP;foo:bar; -*-
// -*-font:mode;mode:php-*-
// -*- foo:bar mode: php bar:baz -*-
"-*-foo:bar;mode:php;bar:foo-*- ";
"-*-font-mode:foo;mode:php;foo-bar:quux-*-"
"-*-font:x;foo:bar; mode : PHP; bar:foo;foooooo:baaaaar;fo:ba;-*-";
"-*- font:x;foo : bar ; mode : php ; bar : foo ; foooooo:baaaaar;fo:ba-*-";
'''
for line in valid.split /\n/
expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull()
invalid = '''
/* --*php-*- */
/* -*-- php -*-
/* -*- -- PHP -*-
/* -*- PHP -;- -*-
// -*- PHPetrol -*-
// -*- PHP; -*-
// -*- php-stuff -*-
/* -*- model:php -*-
/* -*- indent-mode:php -*-
// -*- font:mode;php -*-
// -*- mode: -*- php
// -*- mode: stop-using-php -*-
// -*-font:mode;mode:php--*-
'''
for line in invalid.split /\n/
expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull()
it 'recognises Vim modelines', ->
valid = '''
vim: se filetype=php:
# vim: se ft=php:
# vim: set ft=PHP:
# vim: set filetype=PHP:
# vim: ft=PHTML
# vim: syntax=phtml
# vim: se syntax=php:
# ex: syntax=PHP
# vim:ft=php
# vim600: ft=php
# vim>600: set ft=PHP:
# vi:noai:sw=3 ts=6 ft=phtml
# vi::::::::::noai:::::::::::: ft=phtml
# vim:ts=4:sts=4:sw=4:noexpandtab:ft=phtml
# vi:: noai : : : : sw =3 ts =6 ft =php
# vim: ts=4: pi sts=4: ft=php: noexpandtab: sw=4:
# vim: ts=4 sts=4: ft=php noexpandtab:
# vim:noexpandtab sts=4 ft=php ts=4
# vim:noexpandtab:ft=php
# vim:ts=4:sts=4 ft=phtml:noexpandtab:\x20
# vim:noexpandtab titlestring=hi\|there\\\\ ft=phtml ts=4
'''
for line in valid.split /\n/
expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull()
invalid = '''
ex: se filetype=php:
_vi: se filetype=php:
vi: se filetype=php
# vim set ft=phpetrol
# vim: soft=php
# vim: clean-syntax=php:
# vim set ft=php:
# vim: setft=php:
# vim: se ft=php backupdir=tmp
# vim: set ft=php set cmdheight=1
# vim:noexpandtab sts:4 ft:php ts:4
# vim:noexpandtab titlestring=hi\\|there\\ ft=php ts=4
# vim:noexpandtab titlestring=hi\\|there\\\\\\ ft=php ts=4
'''
for line in invalid.split /\n/
expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull()
it 'should tokenize <?php use Some\\Name ?>', ->
lines = grammar.tokenizeLines '''
<?php use Some\\Name ?>
<article>
'''
expect(lines[0][0]).toEqual value: '<?php', scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.begin.php']
expect(lines[0][1]).toEqual value: ' ', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php']
expect(lines[0][2]).toEqual value: 'use', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'meta.use.php', 'keyword.other.use.php']
expect(lines[0][3]).toEqual value: ' ', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'meta.use.php']
expect(lines[0][4]).toEqual value: 'Some', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'meta.use.php', 'support.other.namespace.php']
expect(lines[0][5]).toEqual value: '\\', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'meta.use.php', 'support.other.namespace.php', 'punctuation.separator.inheritance.php']
expect(lines[0][6]).toEqual value: 'Name', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'meta.use.php', 'support.class.php']
expect(lines[0][8]).toEqual value: '?', scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.end.php', 'source.php']
expect(lines[0][9]).toEqual value: '>', scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.end.php']

View File

@ -0,0 +1,349 @@
describe('PHP in HTML', () => {
let grammar = null;
beforeEach(() => {
waitsForPromise(() => atom.packages.activatePackage('language-php'));
waitsForPromise(() => // While not used explicitly in any tests, we still activate language-html
// to mirror how language-php behaves outside of specs
atom.packages.activatePackage('language-html'));
runs(() => grammar = atom.grammars.grammarForScopeName('text.html.php'));
});
it('parses the grammar', () => {
expect(grammar).toBeTruthy();
expect(grammar.scopeName).toBe('text.html.php');
});
describe('PHP tags', () => {
it('tokenizes starting and closing PHP tags on the same line', () => {
const startTags = ['<?php', '<?=', '<?'];
return (() => {
const result = [];
for (let startTag of startTags) {
const tokens = grammar.tokenizeLines(`${startTag} /* stuff */ ?>`);
expect(tokens[0][0]).toEqual({value: startTag, scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.begin.php']});
expect(tokens[0][1]).toEqual({value: ' ', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php']});
expect(tokens[0][2]).toEqual({value: '/*', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']});
expect(tokens[0][4]).toEqual({value: '*/', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']});
expect(tokens[0][5]).toEqual({value: ' ', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php']});
expect(tokens[0][6]).toEqual({value: '?', scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.end.php', 'source.php']});
result.push(expect(tokens[0][7]).toEqual({value: '>', scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.end.php']}));
}
return result;
})();
});
it('tokenizes starting and closing PHP tags on different lines', () => {
const startTags = ['<?php', '<?=', '<?'];
return (() => {
const result = [];
for (let startTag of startTags) {
let tokens = grammar.tokenizeLines(`${startTag}\n/* stuff */ ?>`);
expect(tokens[0][0]).toEqual({value: startTag, scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.begin.php']});
expect(tokens[1][0]).toEqual({value: '/*', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']});
expect(tokens[1][2]).toEqual({value: '*/', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']});
expect(tokens[1][3]).toEqual({value: ' ', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php']});
expect(tokens[1][4]).toEqual({value: '?', scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.end.php', 'source.php']});
expect(tokens[1][5]).toEqual({value: '>', scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.end.php']});
tokens = grammar.tokenizeLines(`${startTag} /* stuff */\n?>`);
expect(tokens[0][0]).toEqual({value: startTag, scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.begin.php']});
expect(tokens[0][1]).toEqual({value: ' ', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php']});
expect(tokens[0][2]).toEqual({value: '/*', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']});
expect(tokens[0][4]).toEqual({value: '*/', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']});
expect(tokens[1][0]).toEqual({value: '?', scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.end.php', 'source.php']});
expect(tokens[1][1]).toEqual({value: '>', scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.end.php']});
tokens = grammar.tokenizeLines(`${startTag}\n/* stuff */\n?>`);
expect(tokens[0][0]).toEqual({value: startTag, scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.begin.php']});
expect(tokens[1][0]).toEqual({value: '/*', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']});
expect(tokens[1][2]).toEqual({value: '*/', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']});
expect(tokens[2][0]).toEqual({value: '?', scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.end.php', 'source.php']});
result.push(expect(tokens[2][1]).toEqual({value: '>', scopes: ['text.html.php', 'meta.embedded.block.php', 'punctuation.section.embedded.end.php']}));
}
return result;
})();
});
it('tokenizes `include` on the same line as <?php', () => {
// https://github.com/atom/language-php/issues/154
const {tokens} = grammar.tokenizeLine("<?php include 'test'?>");
expect(tokens[2]).toEqual({value: 'include', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'meta.include.php', 'keyword.control.import.include.php']});
expect(tokens[4]).toEqual({value: "'", scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'meta.include.php', 'string.quoted.single.php', 'punctuation.definition.string.begin.php']});
expect(tokens[6]).toEqual({value: "'", scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'meta.include.php', 'string.quoted.single.php', 'punctuation.definition.string.end.php']});
expect(tokens[7]).toEqual({value: '?', scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.end.php', 'source.php']});
expect(tokens[8]).toEqual({value: '>', scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.end.php']});
});
it('tokenizes namespaces immediately following <?php', () => {
const {tokens} = grammar.tokenizeLine('<?php namespace Test;');
expect(tokens[1]).toEqual({value: ' ', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'meta.namespace.php']});
expect(tokens[2]).toEqual({value: 'namespace', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'meta.namespace.php', 'keyword.other.namespace.php']});
expect(tokens[3]).toEqual({value: ' ', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'meta.namespace.php']});
expect(tokens[4]).toEqual({value: 'Test', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'meta.namespace.php', 'entity.name.type.namespace.php']});
expect(tokens[5]).toEqual({value: ';', scopes: ['text.html.php', 'meta.embedded.block.php', 'source.php', 'punctuation.terminator.expression.php']});
});
it('does not tokenize PHP tag syntax within PHP syntax itself inside HTML script tags (regression)', () => {
const lines = grammar.tokenizeLines(`\
<script>
<?php
/*
?>
<?php
*/
?>
</script>\
`
);
expect(lines[0][0]).toEqual({value: '<', scopes: ['text.html.php', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
expect(lines[0][1]).toEqual({value: 'script', scopes: ['text.html.php', 'meta.tag.script.html', 'entity.name.tag.script.html']});
expect(lines[0][2]).toEqual({value: '>', scopes: ['text.html.php', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
expect(lines[1][0]).toEqual({value: '<?php', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'punctuation.section.embedded.begin.php']});
expect(lines[2][0]).toEqual({value: ' ', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'source.php']});
expect(lines[2][1]).toEqual({value: '/*', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']});
expect(lines[3][0]).toEqual({value: ' ?>', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'source.php', 'comment.block.php']});
expect(lines[4][0]).toEqual({value: ' <?php', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'source.php', 'comment.block.php']});
expect(lines[5][0]).toEqual({value: ' ', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'source.php', 'comment.block.php']});
expect(lines[5][1]).toEqual({value: '*/', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']});
expect(lines[6][0]).toEqual({value: '?', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'punctuation.section.embedded.end.php', 'source.php']});
expect(lines[6][1]).toEqual({value: '>', scopes: ['text.html.php', 'meta.tag.script.html', 'source.js.embedded.html', 'meta.embedded.block.php', 'punctuation.section.embedded.end.php']});
expect(lines[7][0]).toEqual({value: '</', scopes: ['text.html.php', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
expect(lines[7][1]).toEqual({value: 'script', scopes: ['text.html.php', 'meta.tag.script.html', 'entity.name.tag.script.html']});
expect(lines[7][2]).toEqual({value: '>', scopes: ['text.html.php', 'meta.tag.script.html', 'punctuation.definition.tag.html']});
});
it('does not tokenize PHP tag syntax within PHP syntax itself inside HTML attributes (regression)', () => {
const {tokens} = grammar.tokenizeLine('<img src="<?php /* ?> <?php */ ?>" />');
expect(tokens[0]).toEqual({value: '<', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'punctuation.definition.tag.begin.html']});
expect(tokens[1]).toEqual({value: 'img', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'entity.name.tag.inline.img.html']});
expect(tokens[2]).toEqual({value: ' ', scopes: ['text.html.php', 'meta.tag.inline.img.html']});
expect(tokens[3]).toEqual({value: 'src', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'entity.other.attribute-name.html']});
expect(tokens[4]).toEqual({value: '=', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'punctuation.separator.key-value.html']});
expect(tokens[5]).toEqual({value: '"', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html']});
expect(tokens[6]).toEqual({value: '<?php', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'meta.embedded.line.php', 'punctuation.section.embedded.begin.php']});
expect(tokens[7]).toEqual({value: ' ', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'meta.embedded.line.php', 'source.php']});
expect(tokens[8]).toEqual({value: '/*', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'meta.embedded.line.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']});
expect(tokens[9]).toEqual({value: ' ?> <?php ', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'meta.embedded.line.php', 'source.php', 'comment.block.php']});
expect(tokens[10]).toEqual({value: '*/', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'meta.embedded.line.php', 'source.php', 'comment.block.php', 'punctuation.definition.comment.php']});
expect(tokens[11]).toEqual({value: ' ', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'meta.embedded.line.php', 'source.php']});
expect(tokens[12]).toEqual({value: '?', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'meta.embedded.line.php', 'punctuation.section.embedded.end.php', 'source.php']});
expect(tokens[13]).toEqual({value: '>', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'meta.embedded.line.php', 'punctuation.section.embedded.end.php']});
expect(tokens[14]).toEqual({value: '"', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html']});
expect(tokens[15]).toEqual({value: ' />', scopes: ['text.html.php', 'meta.tag.inline.img.html', 'punctuation.definition.tag.end.html']});
});
});
describe('shebang', () => {
it('recognises shebang on the first line of document', () => {
const lines = grammar.tokenizeLines(`\
#!/usr/bin/env php
<?php echo "test"; ?>\
`
);
expect(lines[0][0]).toEqual({value: '#!', scopes: ['text.html.php', 'comment.line.shebang.php', 'punctuation.definition.comment.php']});
expect(lines[0][1]).toEqual({value: '/usr/bin/env php', scopes: ['text.html.php', 'comment.line.shebang.php']});
expect(lines[1][0]).toEqual({value: '<?php', scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.begin.php']});
});
it('does not recognize shebang on any of the other lines', () => {
const lines = grammar.tokenizeLines(`\
#!/usr/bin/env php
<?php echo "test"; ?>\
`
);
expect(lines[1][0]).toEqual({value: '#!/usr/bin/env php', scopes: ['text.html.php']});
});
});
describe('firstLineMatch', () => {
  it('recognises opening PHP tags', () => {
    // Candidate first lines — every opening-tag variant should select the PHP grammar.
    // (Template content stays flush-left: leading whitespace would become part of the fixture.)
    const valid = `\
<?php
<?PHP
<?=
<?
<? echo "test";
<?="test"
<?php namespace foo;\
`;
    for (let line of valid.split(/\n/)) {
      expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull();
    }

    // Do not allow matching XML declaration until the grammar scoring system takes into account
    // the length of the first line match so that longer matches get the priority over shorter matches.
    expect(grammar.firstLineRegex.findNextMatchSync('<?xml version="1.0" encoding="UTF-8"?>')).toBeNull();
  });
  it('recognises interpreter directives', () => {
    let line;
    // Shebang lines that should be recognised as launching a PHP interpreter,
    // including env indirection, versioned binaries, flags and env assignments.
    const valid = `\
#!/usr/bin/php
#!/usr/bin/php foo=bar/
#!/usr/sbin/php5
#!/usr/sbin/php7 foo bar baz
#!/usr/bin/php perl
#!/usr/bin/php4 bin/perl
#!/usr/bin/env php
#!/bin/php
#!/usr/bin/php --script=usr/bin
#! /usr/bin/env A=003 B=149 C=150 D=xzd E=base64 F=tar G=gz H=head I=tail php
#!\t/usr/bin/env --foo=bar php --quu=quux
#! /usr/bin/php
#!/usr/bin/env php\
`;
    for (line of valid.split(/\n/)) {
      expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull();
    }
    // Near-misses: leading whitespace, hyphenated/mangled binary names, env flags
    // that merely contain "php" — none should match.
    const invalid = `\
\x20#!/usr/sbin/php
\t#!/usr/sbin/php
#!/usr/bin/env-php/node-env/
#!/usr/bin/env-php
#! /usr/binphp
#!/usr/bin.php
#!\t/usr/bin/env --php=bar\
`;
    return (() => {
      const result = [];
      for (line of invalid.split(/\n/)) {
        result.push(expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull());
      }
      return result;
    })();
  });
  it('recognises Emacs modelines', () => {
    let line;
    // Emacs -*- ... -*- modelines in several comment/string wrappers; both the
    // bare form and the `mode:` variable form (case-insensitive) should match.
    const valid = `\
#-*- PHP -*-
#-*- mode: PHP -*-
/* -*-php-*- */
// -*- PHP -*-
/* -*- mode:PHP -*- */
// -*- font:bar;mode:pHp -*-
// -*- font:bar;mode:PHP;foo:bar; -*-
// -*-font:mode;mode:php-*-
// -*- foo:bar mode: php bar:baz -*-
"-*-foo:bar;mode:php;bar:foo-*- ";
"-*-font-mode:foo;mode:php;foo-bar:quux-*-"
"-*-font:x;foo:bar; mode : PHP; bar:foo;foooooo:baaaaar;fo:ba;-*-";
"-*- font:x;foo : bar ; mode : php ; bar : foo ; foooooo:baaaaar;fo:ba-*-";\
`;
    for (line of valid.split(/\n/)) {
      expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull();
    }
    // Malformed delimiters, wrong variable names, and mode values that only
    // contain "php" as a substring must all be rejected.
    const invalid = `\
/* --*php-*- */
/* -*-- php -*-
/* -*- -- PHP -*-
/* -*- PHP -;- -*-
// -*- PHPetrol -*-
// -*- PHP; -*-
// -*- php-stuff -*-
/* -*- model:php -*-
/* -*- indent-mode:php -*-
// -*- font:mode;php -*-
// -*- mode: -*- php
// -*- mode: stop-using-php -*-
// -*-font:mode;mode:php--*-\
`;
    return (() => {
      const result = [];
      for (line of invalid.split(/\n/)) {
        result.push(expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull());
      }
      return result;
    })();
  });
  it('recognises Vim modelines', () => {
    let line;
    // Vim/ex modelines setting filetype/ft/syntax to php or phtml, in both the
    // "set ... :" and the option-list forms, with assorted other options mixed in.
    const valid = `\
vim: se filetype=php:
# vim: se ft=php:
# vim: set ft=PHP:
# vim: set filetype=PHP:
# vim: ft=PHTML
# vim: syntax=phtml
# vim: se syntax=php:
# ex: syntax=PHP
# vim:ft=php
# vim600: ft=php
# vim>600: set ft=PHP:
# vi:noai:sw=3 ts=6 ft=phtml
# vi::::::::::noai:::::::::::: ft=phtml
# vim:ts=4:sts=4:sw=4:noexpandtab:ft=phtml
# vi:: noai : : : : sw =3 ts =6 ft =php
# vim: ts=4: pi sts=4: ft=php: noexpandtab: sw=4:
# vim: ts=4 sts=4: ft=php noexpandtab:
# vim:noexpandtab sts=4 ft=php ts=4
# vim:noexpandtab:ft=php
# vim:ts=4:sts=4 ft=phtml:noexpandtab:\x20
# vim:noexpandtab titlestring=hi\|there\\\\ ft=phtml ts=4\
`;
    for (line of valid.split(/\n/)) {
      expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull();
    }
    // Missing colon after vim, wrong option names, `ft:`/`sts:` with colons
    // instead of `=`, and unbalanced backslash escapes must not match.
    const invalid = `\
ex: se filetype=php:
_vi: se filetype=php:
vi: se filetype=php
# vim set ft=phpetrol
# vim: soft=php
# vim: clean-syntax=php:
# vim set ft=php:
# vim: setft=php:
# vim: se ft=php backupdir=tmp
# vim: set ft=php set cmdheight=1
# vim:noexpandtab sts:4 ft:php ts:4
# vim:noexpandtab titlestring=hi\\|there\\ ft=php ts=4
# vim:noexpandtab titlestring=hi\\|there\\\\\\ ft=php ts=4\
`;
    return (() => {
      const result = [];
      for (line of invalid.split(/\n/)) {
        result.push(expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull());
      }
      return result;
    })();
  });
  it('should tokenize <?php use Some\\Name ?>', () => {
    // A one-line `use` import: the namespace path splits into namespace,
    // separator and class tokens, then the `?>` closes the embedded block.
    const lines = grammar.tokenizeLines(`\
<?php use Some\\Name ?>
<article>\
`
    );
    expect(lines[0][0]).toEqual({value: '<?php', scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.begin.php']});
    expect(lines[0][1]).toEqual({value: ' ', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php']});
    expect(lines[0][2]).toEqual({value: 'use', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'meta.use.php', 'keyword.other.use.php']});
    expect(lines[0][3]).toEqual({value: ' ', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'meta.use.php']});
    expect(lines[0][4]).toEqual({value: 'Some', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'meta.use.php', 'support.other.namespace.php']});
    expect(lines[0][5]).toEqual({value: '\\', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'meta.use.php', 'support.other.namespace.php', 'punctuation.separator.inheritance.php']});
    expect(lines[0][6]).toEqual({value: 'Name', scopes: ['text.html.php', 'meta.embedded.line.php', 'source.php', 'meta.use.php', 'support.class.php']});
    expect(lines[0][8]).toEqual({value: '?', scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.end.php', 'source.php']});
    expect(lines[0][9]).toEqual({value: '>', scopes: ['text.html.php', 'meta.embedded.line.php', 'punctuation.section.embedded.end.php']});
  });
});
});

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,84 +1,89 @@
describe 'Python settings', ->
[editor, languageMode] = []
afterEach ->
editor.destroy()
describe('Python settings', () => {
let [editor, languageMode] = [];
beforeEach ->
atom.config.set 'core.useTreeSitterParsers', false
afterEach(() => editor.destroy());
beforeEach(() => {
atom.config.set('core.useTreeSitterParsers', false);
waitsForPromise ->
atom.workspace.open().then (o) ->
editor = o
languageMode = editor.languageMode
waitsForPromise(() => atom.workspace.open().then(function(o) {
editor = o;
languageMode = editor.languageMode;
}));
waitsForPromise ->
atom.packages.activatePackage('language-python')
waitsForPromise(() => atom.packages.activatePackage('language-python'));
});
it 'matches lines correctly using the increaseIndentPattern', ->
increaseIndentRegex = languageMode.increaseIndentRegexForScopeDescriptor(['source.python'])
it('matches lines correctly using the increaseIndentPattern', () => {
const increaseIndentRegex = languageMode.increaseIndentRegexForScopeDescriptor(['source.python']);
expect(increaseIndentRegex.findNextMatchSync('for i in range(n):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' for i in range(n):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('async for i in range(n):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' async for i in range(n):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('class TheClass(Object):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' class TheClass(Object):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('def f(x):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' def f(x):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('async def f(x):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' async def f(x):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('if this_var == that_var:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' if this_var == that_var:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('elif this_var == that_var:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' elif this_var == that_var:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('else:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' else:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('except Exception:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' except Exception:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('except Exception as e:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' except Exception as e:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('finally:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' finally:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('with open("filename") as f:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' with open("filename") as f:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('async with open("filename") as f:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' async with open("filename") as f:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('while True:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' while True:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('\t\t while True:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('for i in range(n):')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync(' for i in range(n):')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync('async for i in range(n):')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync(' async for i in range(n):')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync('class TheClass(Object):')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync(' class TheClass(Object):')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync('def f(x):')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync(' def f(x):')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync('async def f(x):')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync(' async def f(x):')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync('if this_var == that_var:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync(' if this_var == that_var:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync('elif this_var == that_var:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync(' elif this_var == that_var:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync('else:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync(' else:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync('except Exception:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync(' except Exception:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync('except Exception as e:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync(' except Exception as e:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync('finally:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync(' finally:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync('with open("filename") as f:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync(' with open("filename") as f:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync('async with open("filename") as f:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync(' async with open("filename") as f:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync('while True:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync(' while True:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync('\t\t while True:')).toBeTruthy();
});
it 'does not match lines incorrectly using the increaseIndentPattern', ->
increaseIndentRegex = languageMode.increaseIndentRegexForScopeDescriptor(['source.python'])
it('does not match lines incorrectly using the increaseIndentPattern', () => {
const increaseIndentRegex = languageMode.increaseIndentRegexForScopeDescriptor(['source.python']);
expect(increaseIndentRegex.findNextMatchSync('for i in range(n)')).toBeFalsy()
expect(increaseIndentRegex.findNextMatchSync('class TheClass(Object)')).toBeFalsy()
expect(increaseIndentRegex.findNextMatchSync('def f(x)')).toBeFalsy()
expect(increaseIndentRegex.findNextMatchSync('if this_var == that_var')).toBeFalsy()
expect(increaseIndentRegex.findNextMatchSync('"for i in range(n):"')).toBeFalsy()
expect(increaseIndentRegex.findNextMatchSync('for i in range(n)')).toBeFalsy();
expect(increaseIndentRegex.findNextMatchSync('class TheClass(Object)')).toBeFalsy();
expect(increaseIndentRegex.findNextMatchSync('def f(x)')).toBeFalsy();
expect(increaseIndentRegex.findNextMatchSync('if this_var == that_var')).toBeFalsy();
expect(increaseIndentRegex.findNextMatchSync('"for i in range(n):"')).toBeFalsy();
});
it 'matches lines correctly using the decreaseIndentPattern', ->
decreaseIndentRegex = languageMode.decreaseIndentRegexForScopeDescriptor(['source.python'])
it('matches lines correctly using the decreaseIndentPattern', () => {
const decreaseIndentRegex = languageMode.decreaseIndentRegexForScopeDescriptor(['source.python']);
expect(decreaseIndentRegex.findNextMatchSync('elif this_var == that_var:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync(' elif this_var == that_var:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync('else:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync(' else:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync('except Exception:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync(' except Exception:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync('except Exception as e:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync(' except Exception as e:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync('finally:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync(' finally:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync('\t\t finally:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync('elif this_var == that_var:')).toBeTruthy();
expect(decreaseIndentRegex.findNextMatchSync(' elif this_var == that_var:')).toBeTruthy();
expect(decreaseIndentRegex.findNextMatchSync('else:')).toBeTruthy();
expect(decreaseIndentRegex.findNextMatchSync(' else:')).toBeTruthy();
expect(decreaseIndentRegex.findNextMatchSync('except Exception:')).toBeTruthy();
expect(decreaseIndentRegex.findNextMatchSync(' except Exception:')).toBeTruthy();
expect(decreaseIndentRegex.findNextMatchSync('except Exception as e:')).toBeTruthy();
expect(decreaseIndentRegex.findNextMatchSync(' except Exception as e:')).toBeTruthy();
expect(decreaseIndentRegex.findNextMatchSync('finally:')).toBeTruthy();
expect(decreaseIndentRegex.findNextMatchSync(' finally:')).toBeTruthy();
expect(decreaseIndentRegex.findNextMatchSync('\t\t finally:')).toBeTruthy();
});
it 'does not match lines incorrectly using the decreaseIndentPattern', ->
decreaseIndentRegex = languageMode.decreaseIndentRegexForScopeDescriptor(['source.python'])
it('does not match lines incorrectly using the decreaseIndentPattern', () => {
const decreaseIndentRegex = languageMode.decreaseIndentRegexForScopeDescriptor(['source.python']);
# NOTE! This first one is different from most other rote tests here.
expect(decreaseIndentRegex.findNextMatchSync('else: expression()')).toBeFalsy()
expect(decreaseIndentRegex.findNextMatchSync('elif this_var == that_var')).toBeFalsy()
expect(decreaseIndentRegex.findNextMatchSync(' elif this_var == that_var')).toBeFalsy()
expect(decreaseIndentRegex.findNextMatchSync('else')).toBeFalsy()
expect(decreaseIndentRegex.findNextMatchSync(' "finally:"')).toBeFalsy()
// NOTE! This first one is different from most other rote tests here.
expect(decreaseIndentRegex.findNextMatchSync('else: expression()')).toBeFalsy();
expect(decreaseIndentRegex.findNextMatchSync('elif this_var == that_var')).toBeFalsy();
expect(decreaseIndentRegex.findNextMatchSync(' elif this_var == that_var')).toBeFalsy();
expect(decreaseIndentRegex.findNextMatchSync('else')).toBeFalsy();
expect(decreaseIndentRegex.findNextMatchSync(' "finally:"')).toBeFalsy();
});
});

View File

@ -1,53 +0,0 @@
# Specs for the standalone Python regular-expression grammar
# (scope name `source.regexp.python`), exercised directly rather than
# through a Python raw-string literal.
describe 'Python regular expression grammar', ->
  grammar = null

  beforeEach ->
    # These specs target the legacy TextMate grammar, so disable Tree-sitter.
    atom.config.set 'core.useTreeSitterParsers', false

    waitsForPromise ->
      atom.packages.activatePackage('language-python')

    runs ->
      grammar = atom.grammars.grammarForScopeName('source.regexp.python')

  describe 'character classes', ->
    it 'does not recursively match character classes', ->
      # A '[' inside a character class is literal — it must not open a nested class.
      {tokens} = grammar.tokenizeLine '[.:[\\]@]'

      expect(tokens[0]).toEqual value: '[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']
      expect(tokens[1]).toEqual value: '.:[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp']
      expect(tokens[2]).toEqual value: '\\]', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'constant.character.escape.backslash.regexp']
      expect(tokens[3]).toEqual value: '@', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp']
      expect(tokens[4]).toEqual value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp']

    it 'does not end the character class early if the first character is a ]', ->
      # ']' immediately after '[' (or '[^') is a literal member of the class.
      {tokens} = grammar.tokenizeLine '[][]'

      expect(tokens[0]).toEqual value: '[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']
      expect(tokens[1]).toEqual value: '][', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp']
      expect(tokens[2]).toEqual value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp']

      {tokens} = grammar.tokenizeLine '[^][]'

      expect(tokens[0]).toEqual value: '[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']
      expect(tokens[1]).toEqual value: '^', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'keyword.operator.negation.regexp']
      expect(tokens[2]).toEqual value: '][', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp']
      expect(tokens[3]).toEqual value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp']

    it 'escapes the character following any backslash', ->
      # Every backslash-plus-character pair becomes one escape token,
      # regardless of whether the escaped character is a metacharacter.
      {tokens} = grammar.tokenizeLine '''\\q\\(\\[\\'\\"\\?\\^\\-\\*\\.\\#'''

      expect(tokens[0]).toEqual value: '\\q', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
      expect(tokens[1]).toEqual value: '\\(', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
      expect(tokens[2]).toEqual value: '\\[', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
      expect(tokens[3]).toEqual value: '\\\'', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
      expect(tokens[4]).toEqual value: '\\"', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
      expect(tokens[5]).toEqual value: '\\?', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
      expect(tokens[6]).toEqual value: '\\^', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
      expect(tokens[7]).toEqual value: '\\-', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
      expect(tokens[8]).toEqual value: '\\*', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
      expect(tokens[9]).toEqual value: '\\.', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
      expect(tokens[10]).toEqual value: '\\#', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']

      # An escaped '(' inside a real group stays an escape, not a group delimiter.
      {tokens} = grammar.tokenizeLine '''(\\()\\)'''

      expect(tokens[0]).toEqual value: '(', scopes: ['source.regexp.python', 'meta.group.regexp', 'punctuation.definition.group.regexp']
      expect(tokens[1]).toEqual value: '\\(', scopes: ['source.regexp.python', 'meta.group.regexp', 'constant.character.escape.backslash.regexp']
      expect(tokens[2]).toEqual value: ')', scopes: ['source.regexp.python', 'meta.group.regexp', 'punctuation.definition.group.regexp']
      expect(tokens[3]).toEqual value: '\\)', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']

View File

@ -0,0 +1,58 @@
// Specs for the standalone Python regular-expression grammar
// (scope name `source.regexp.python`).
describe('Python regular expression grammar', () => {
  let grammar = null;

  beforeEach(() => {
    // These specs target the legacy TextMate grammar, so disable Tree-sitter.
    atom.config.set('core.useTreeSitterParsers', false);
    waitsForPromise(() => atom.packages.activatePackage('language-python'));
    runs(() => {
      grammar = atom.grammars.grammarForScopeName('source.regexp.python');
    });
  });

  describe('character classes', () => {
    // Scope lists shared by the character-class assertions below.
    const setScopes = ['source.regexp.python', 'constant.other.character-class.set.regexp'];
    const beginScopes = setScopes.concat('punctuation.definition.character-class.begin.regexp');
    const endScopes = setScopes.concat('punctuation.definition.character-class.end.regexp');

    it('does not recursively match character classes', () => {
      // A '[' inside a class is literal — it must not open a nested class.
      const {tokens} = grammar.tokenizeLine('[.:[\\]@]');
      expect(tokens[0]).toEqual({value: '[', scopes: beginScopes});
      expect(tokens[1]).toEqual({value: '.:[', scopes: setScopes});
      expect(tokens[2]).toEqual({value: '\\]', scopes: setScopes.concat('constant.character.escape.backslash.regexp')});
      expect(tokens[3]).toEqual({value: '@', scopes: setScopes});
      expect(tokens[4]).toEqual({value: ']', scopes: endScopes});
    });

    it('does not end the character class early if the first character is a ]', () => {
      // ']' immediately after '[' is a literal member of the class.
      let tokens = grammar.tokenizeLine('[][]').tokens;
      expect(tokens[0]).toEqual({value: '[', scopes: beginScopes});
      expect(tokens[1]).toEqual({value: '][', scopes: setScopes});
      expect(tokens[2]).toEqual({value: ']', scopes: endScopes});

      // Same again with a leading negation '^'.
      tokens = grammar.tokenizeLine('[^][]').tokens;
      expect(tokens[0]).toEqual({value: '[', scopes: beginScopes});
      expect(tokens[1]).toEqual({value: '^', scopes: setScopes.concat('keyword.operator.negation.regexp')});
      expect(tokens[2]).toEqual({value: '][', scopes: setScopes});
      expect(tokens[3]).toEqual({value: ']', scopes: endScopes});
    });

    it('escapes the character following any backslash', () => {
      const escapeScopes = ['source.regexp.python', 'constant.character.escape.backslash.regexp'];
      // Each pair below should come back as one escape token, in order.
      const escaped = ['\\q', '\\(', '\\[', '\\\'', '\\"', '\\?', '\\^', '\\-', '\\*', '\\.', '\\#'];
      const {tokens} = grammar.tokenizeLine(escaped.join(''));
      escaped.forEach((value, i) => expect(tokens[i]).toEqual({value, scopes: escapeScopes}));

      // An escaped '(' inside a real group stays an escape, not a group delimiter.
      const groupScopes = ['source.regexp.python', 'meta.group.regexp'];
      const groupTokens = grammar.tokenizeLine('(\\()\\)').tokens;
      expect(groupTokens[0]).toEqual({value: '(', scopes: groupScopes.concat('punctuation.definition.group.regexp')});
      expect(groupTokens[1]).toEqual({value: '\\(', scopes: groupScopes.concat('constant.character.escape.backslash.regexp')});
      expect(groupTokens[2]).toEqual({value: ')', scopes: groupScopes.concat('punctuation.definition.group.regexp')});
      expect(groupTokens[3]).toEqual({value: '\\)', scopes: escapeScopes});
    });
  });
});

View File

@ -1,760 +0,0 @@
path = require 'path'
grammarTest = require 'atom-grammar-test'
describe "Python grammar", ->
# Grammar under test; assigned once language-python finishes activating.
grammar = null

beforeEach ->
  # These specs target the legacy TextMate grammar, so disable Tree-sitter.
  atom.config.set 'core.useTreeSitterParsers', false

  waitsForPromise ->
    atom.packages.activatePackage("language-python")

  runs ->
    grammar = atom.grammars.grammarForScopeName("source.python")
# Smoke tests for grammar selection, keywords, and string tokenization.
it "recognises shebang on firstline", ->
  expect(grammar.firstLineRegex.findNextMatchSync("#!/usr/bin/env python")).not.toBeNull()
  expect(grammar.firstLineRegex.findNextMatchSync("#! /usr/bin/env python")).not.toBeNull()

it "parses the grammar", ->
  expect(grammar).toBeDefined()
  expect(grammar.scopeName).toBe "source.python"

it "tokenizes `yield`", ->
  {tokens} = grammar.tokenizeLine 'yield v'

  expect(tokens[0]).toEqual value: 'yield', scopes: ['source.python', 'keyword.control.statement.python']

it "tokenizes `yield from`", ->
  # `yield from` is matched as a single compound keyword token.
  {tokens} = grammar.tokenizeLine 'yield from v'

  expect(tokens[0]).toEqual value: 'yield from', scopes: ['source.python', 'keyword.control.statement.python']

it "tokenizes multi-line strings", ->
  # A backslash-newline inside a single-line string continues it onto the next line.
  tokens = grammar.tokenizeLines('"1\\\n2"')

  # Line 0
  expect(tokens[0][0].value).toBe '"'
  expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']

  expect(tokens[0][1].value).toBe '1'
  expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python']

  expect(tokens[0][2].value).toBe '\\'
  expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.newline.python']

  expect(tokens[0][3]).not.toBeDefined()

  # Line 1
  expect(tokens[1][0].value).toBe '2'
  expect(tokens[1][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python']

  expect(tokens[1][1].value).toBe '"'
  expect(tokens[1][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']

  expect(tokens[1][2]).not.toBeDefined()

# The raw-regex string sub-grammar opens regexp constructs like '(' and '[';
# the following four tests ensure an unclosed construct does not swallow the
# string's closing quote.
it "terminates a single-quoted raw string containing opening parenthesis at closing quote", ->
  tokens = grammar.tokenizeLines("r'%d(' #foo")

  expect(tokens[0][0].value).toBe 'r'
  expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'storage.type.string.python']
  expect(tokens[0][1].value).toBe "'"
  expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.begin.python']
  expect(tokens[0][2].value).toBe '%d'
  expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'constant.other.placeholder.python']
  expect(tokens[0][3].value).toBe '('
  expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp']
  expect(tokens[0][4].value).toBe "'"
  expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.end.python']
  expect(tokens[0][5].value).toBe ' '
  expect(tokens[0][5].scopes).toEqual ['source.python']
  expect(tokens[0][6].value).toBe '#'
  expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']
  expect(tokens[0][7].value).toBe 'foo'
  expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python']

it "terminates a single-quoted raw string containing opening bracket at closing quote", ->
  tokens = grammar.tokenizeLines("r'%d[' #foo")

  expect(tokens[0][0].value).toBe 'r'
  expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'storage.type.string.python']
  expect(tokens[0][1].value).toBe "'"
  expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.begin.python']
  expect(tokens[0][2].value).toBe '%d'
  expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'constant.other.placeholder.python']
  expect(tokens[0][3].value).toBe '['
  expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']
  expect(tokens[0][4].value).toBe "'"
  expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.end.python']
  expect(tokens[0][5].value).toBe ' '
  expect(tokens[0][5].scopes).toEqual ['source.python']
  expect(tokens[0][6].value).toBe '#'
  expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']
  expect(tokens[0][7].value).toBe 'foo'
  expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python']

it "terminates a double-quoted raw string containing opening parenthesis at closing quote", ->
  tokens = grammar.tokenizeLines('r"%d(" #foo')

  expect(tokens[0][0].value).toBe 'r'
  expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'storage.type.string.python']
  expect(tokens[0][1].value).toBe '"'
  expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.begin.python']
  expect(tokens[0][2].value).toBe '%d'
  expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'constant.other.placeholder.python']
  expect(tokens[0][3].value).toBe '('
  expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp']
  expect(tokens[0][4].value).toBe '"'
  expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.end.python']
  expect(tokens[0][5].value).toBe ' '
  expect(tokens[0][5].scopes).toEqual ['source.python']
  expect(tokens[0][6].value).toBe '#'
  expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']
  expect(tokens[0][7].value).toBe 'foo'
  expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python']

it "terminates a double-quoted raw string containing opening bracket at closing quote", ->
  tokens = grammar.tokenizeLines('r"%d[" #foo')

  expect(tokens[0][0].value).toBe 'r'
  expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'storage.type.string.python']
  expect(tokens[0][1].value).toBe '"'
  expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.begin.python']
  expect(tokens[0][2].value).toBe '%d'
  expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'constant.other.placeholder.python']
  expect(tokens[0][3].value).toBe '['
  expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']
  expect(tokens[0][4].value).toBe '"'
  expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.end.python']
  expect(tokens[0][5].value).toBe ' '
  expect(tokens[0][5].scopes).toEqual ['source.python']
  expect(tokens[0][6].value).toBe '#'
  expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']
  expect(tokens[0][7].value).toBe 'foo'
  expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python']

# Same check for the unicode-raw (`ur`) prefix variant.
it "terminates a unicode single-quoted raw string containing opening parenthesis at closing quote", ->
  tokens = grammar.tokenizeLines("ur'%d(' #foo")

  expect(tokens[0][0].value).toBe 'ur'
  expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'storage.type.string.python']
  expect(tokens[0][1].value).toBe "'"
  expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python']
  expect(tokens[0][2].value).toBe '%d'
  expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python']
  expect(tokens[0][3].value).toBe '('
  expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp']
  expect(tokens[0][4].value).toBe "'"
  expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python']
  expect(tokens[0][5].value).toBe ' '
  expect(tokens[0][5].scopes).toEqual ['source.python']
  expect(tokens[0][6].value).toBe '#'
  expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']
  expect(tokens[0][7].value).toBe 'foo'
  expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python']
it "terminates a unicode single-quoted raw string containing opening bracket at closing quote", ->
tokens = grammar.tokenizeLines("ur'%d[' #foo")
expect(tokens[0][0].value).toBe 'ur'
expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'storage.type.string.python']
expect(tokens[0][1].value).toBe "'"
expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python']
expect(tokens[0][2].value).toBe '%d'
expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python']
expect(tokens[0][3].value).toBe '['
expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']
expect(tokens[0][4].value).toBe "'"
expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python']
expect(tokens[0][5].value).toBe ' '
expect(tokens[0][5].scopes).toEqual ['source.python']
expect(tokens[0][6].value).toBe '#'
expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']
expect(tokens[0][7].value).toBe 'foo'
expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python']
it "terminates a unicode double-quoted raw string containing opening parenthesis at closing quote", ->
tokens = grammar.tokenizeLines('ur"%d(" #foo')
expect(tokens[0][0].value).toBe 'ur'
expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'storage.type.string.python']
expect(tokens[0][1].value).toBe '"'
expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python']
expect(tokens[0][2].value).toBe '%d'
expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python']
expect(tokens[0][3].value).toBe '('
expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp']
expect(tokens[0][4].value).toBe '"'
expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python']
expect(tokens[0][5].value).toBe ' '
expect(tokens[0][5].scopes).toEqual ['source.python']
expect(tokens[0][6].value).toBe '#'
expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']
expect(tokens[0][7].value).toBe 'foo'
expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python']
it "terminates a unicode double-quoted raw string containing opening bracket at closing quote", ->
tokens = grammar.tokenizeLines('ur"%d[" #foo')
expect(tokens[0][0].value).toBe 'ur'
expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'storage.type.string.python']
expect(tokens[0][1].value).toBe '"'
expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python']
expect(tokens[0][2].value).toBe '%d'
expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python']
expect(tokens[0][3].value).toBe '['
expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']
expect(tokens[0][4].value).toBe '"'
expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python']
expect(tokens[0][5].value).toBe ' '
expect(tokens[0][5].scopes).toEqual ['source.python']
expect(tokens[0][6].value).toBe '#'
expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']
expect(tokens[0][7].value).toBe 'foo'
expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python']
# Regression spec: after `foo[i[0]]` closes, the immediately following `[`
# must start a new bracketed construct instead of extending the finished
# item access.
# NOTE(review): the grammar scopes the second `[j[0]]` group as a list
# literal (meta.structure.list.python) rather than a second item access on
# the same expression — this spec pins that behavior down; presumably a
# known grammar limitation, confirm against the grammar definition.
it "terminates referencing an item in a list variable after a sequence of a closing and opening bracket", ->
  tokens = grammar.tokenizeLines('foo[i[0]][j[0]]')
  # First group: `foo[i[0]]` — nested item accesses.
  expect(tokens[0][0].value).toBe 'foo'
  expect(tokens[0][0].scopes).toEqual ['source.python', 'meta.item-access.python']
  expect(tokens[0][1].value).toBe '['
  expect(tokens[0][1].scopes).toEqual ['source.python', 'meta.item-access.python', 'punctuation.definition.arguments.begin.python']
  expect(tokens[0][2].value).toBe 'i'
  expect(tokens[0][2].scopes).toEqual ['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python']
  expect(tokens[0][3].value).toBe '['
  expect(tokens[0][3].scopes).toEqual ['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python', 'punctuation.definition.arguments.begin.python']
  expect(tokens[0][4].value).toBe '0'
  expect(tokens[0][4].scopes).toEqual ['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'constant.numeric.integer.decimal.python']
  expect(tokens[0][5].value).toBe ']'
  expect(tokens[0][5].scopes).toEqual ['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python', 'punctuation.definition.arguments.end.python']
  expect(tokens[0][6].value).toBe ']'
  expect(tokens[0][6].scopes).toEqual ['source.python', 'meta.item-access.python', 'punctuation.definition.arguments.end.python']
  # Second group: `[j[0]]` — scoped as a list containing an item access.
  expect(tokens[0][7].value).toBe '['
  expect(tokens[0][7].scopes).toEqual ['source.python', 'meta.structure.list.python', 'punctuation.definition.list.begin.python']
  expect(tokens[0][8].value).toBe 'j'
  expect(tokens[0][8].scopes).toEqual ['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python']
  expect(tokens[0][9].value).toBe '['
  expect(tokens[0][9].scopes).toEqual ['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python', 'punctuation.definition.arguments.begin.python']
  expect(tokens[0][10].value).toBe '0'
  expect(tokens[0][10].scopes).toEqual ['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'constant.numeric.integer.decimal.python']
  expect(tokens[0][11].value).toBe ']'
  expect(tokens[0][11].scopes).toEqual ['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python', 'punctuation.definition.arguments.end.python']
  expect(tokens[0][12].value).toBe ']'
  expect(tokens[0][12].scopes).toEqual ['source.python', 'meta.structure.list.python', 'punctuation.definition.list.end.python']
# Hex escapes accept both upper- and lower-case digits; each \xNN sequence
# becomes a single constant.character.escape.hex token inside the string.
it "tokenizes a hex escape inside a string", ->
  for escape in ['\\x5A', '\\x9f']
    tokens = grammar.tokenizeLines('"' + escape + '"')
    expect(tokens[0][0].value).toBe '"'
    expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
    expect(tokens[0][1].value).toBe escape
    expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.hex.python']
# f-string literal tokenization: the string body, brace escapes, and
# interpolated expressions (scoped meta.interpolation / meta.embedded).
describe "f-strings", ->
  it "tokenizes them", ->
    {tokens} = grammar.tokenizeLine "f'hello'"
    expect(tokens[0]).toEqual value: 'f', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'storage.type.string.python']
    expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.begin.python']
    expect(tokens[2]).toEqual value: 'hello', scopes: ['source.python', "string.quoted.single.single-line.format.python"]
    expect(tokens[3]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python']

  # Doubled braces are literal-brace escapes, not interpolation delimiters.
  it "tokenizes {{ and }} as escape characters", ->
    {tokens} = grammar.tokenizeLine "f'he}}l{{lo'"
    expect(tokens[0]).toEqual value: 'f', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'storage.type.string.python']
    expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.begin.python']
    expect(tokens[2]).toEqual value: 'he', scopes: ['source.python', "string.quoted.single.single-line.format.python"]
    expect(tokens[3]).toEqual value: '}}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'constant.character.escape.curly-bracket.python']
    expect(tokens[4]).toEqual value: 'l', scopes: ['source.python', "string.quoted.single.single-line.format.python"]
    expect(tokens[5]).toEqual value: '{{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'constant.character.escape.curly-bracket.python']
    expect(tokens[6]).toEqual value: 'lo', scopes: ['source.python', "string.quoted.single.single-line.format.python"]
    expect(tokens[7]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python']

  # A lone "}" has no matching "{" and is flagged invalid.
  it "tokenizes unmatched closing curly brackets as invalid", ->
    {tokens} = grammar.tokenizeLine "f'he}llo'"
    expect(tokens[0]).toEqual value: 'f', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'storage.type.string.python']
    expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.begin.python']
    expect(tokens[2]).toEqual value: 'he', scopes: ['source.python', "string.quoted.single.single-line.format.python"]
    expect(tokens[3]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'invalid.illegal.closing-curly-bracket.python']
    expect(tokens[4]).toEqual value: 'llo', scopes: ['source.python', "string.quoted.single.single-line.format.python"]
    expect(tokens[5]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python']

  # Content between { and } is tokenized as embedded Python source.
  describe "in expressions", ->
    it "tokenizes variables", ->
      {tokens} = grammar.tokenizeLine "f'{abc}'"
      expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']
      expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']
      expect(tokens[4]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']

    it "tokenizes arithmetic", ->
      {tokens} = grammar.tokenizeLine "f'{5 - 3}'"
      expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']
      expect(tokens[3]).toEqual value: '5', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'constant.numeric.integer.decimal.python']
      expect(tokens[5]).toEqual value: '-', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'keyword.operator.arithmetic.python']
      expect(tokens[7]).toEqual value: '3', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'constant.numeric.integer.decimal.python']
      expect(tokens[8]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']

    # Nested strings inside the expression get their own string scopes.
    it "tokenizes function and method calls", ->
      {tokens} = grammar.tokenizeLine "f'{name.decode(\"utf-8\").lower()}'"
      expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']
      expect(tokens[3]).toEqual value: 'name', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'variable.other.object.python']
      expect(tokens[4]).toEqual value: '.', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.separator.method.period.python']
      expect(tokens[5]).toEqual value: 'decode', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'entity.name.function.python']
      expect(tokens[6]).toEqual value: '(', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.begin.bracket.round.python']
      expect(tokens[7]).toEqual value: '"', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'meta.method-call.arguments.python', "string.quoted.double.single-line.python", 'punctuation.definition.string.begin.python']
      expect(tokens[8]).toEqual value: 'utf-8', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'meta.method-call.arguments.python', "string.quoted.double.single-line.python"]
      expect(tokens[9]).toEqual value: '"', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'meta.method-call.arguments.python', "string.quoted.double.single-line.python", 'punctuation.definition.string.end.python']
      expect(tokens[10]).toEqual value: ')', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.end.bracket.round.python']
      expect(tokens[11]).toEqual value: '.', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.separator.method.period.python']
      expect(tokens[12]).toEqual value: 'lower', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'entity.name.function.python']
      expect(tokens[13]).toEqual value: '(', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.begin.bracket.round.python']
      expect(tokens[14]).toEqual value: ')', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.end.bracket.round.python']
      expect(tokens[15]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']

    # "!r"-style conversions scope as a placeholder, not embedded code.
    it "tokenizes conversion flags", ->
      {tokens} = grammar.tokenizeLine "f'{abc!r}'"
      expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']
      expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']
      expect(tokens[4]).toEqual value: '!r', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python']
      expect(tokens[5]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']

    it "tokenizes format specifiers", ->
      {tokens} = grammar.tokenizeLine "f'{abc:^d}'"
      expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']
      expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']
      expect(tokens[4]).toEqual value: ':^d', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python']
      expect(tokens[5]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']

    # A replacement field inside a format spec nests placeholder scopes.
    it "tokenizes nested replacement fields in top-level format specifiers", ->
      {tokens} = grammar.tokenizeLine "f'{abc:{align}d}'"
      expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']
      expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']
      expect(tokens[4]).toEqual value: ':', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python']
      expect(tokens[5]).toEqual value: '{align}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python', 'constant.other.placeholder.python']
      expect(tokens[6]).toEqual value: 'd', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python']
      expect(tokens[7]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']

    # The grammar marks backslashes inside interpolations as illegal.
    it "tokenizes backslashes as invalid", ->
      {tokens} = grammar.tokenizeLine "f'{ab\\n}'"
      expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']
      expect(tokens[3]).toEqual value: 'ab', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']
      expect(tokens[4]).toEqual value: '\\', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'invalid.illegal.backslash.python']
      expect(tokens[6]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']
# b'...' byte-string literals: plain ASCII content is string text; the
# grammar flags non-ASCII characters as out of range.
describe "binary strings", ->
  binScopes = ['source.python', 'string.quoted.single.single-line.binary.python']

  it "tokenizes them", ->
    {tokens} = grammar.tokenizeLine "b'test'"
    expect(tokens[0]).toEqual value: 'b', scopes: binScopes.concat ['storage.type.string.python']
    expect(tokens[1]).toEqual value: "'", scopes: binScopes.concat ['punctuation.definition.string.begin.python']
    expect(tokens[2]).toEqual value: 'test', scopes: binScopes
    expect(tokens[3]).toEqual value: "'", scopes: binScopes.concat ['punctuation.definition.string.end.python']

  it "tokenizes invalid characters", ->
    {tokens} = grammar.tokenizeLine "b'tést'"
    expect(tokens[0]).toEqual value: 'b', scopes: binScopes.concat ['storage.type.string.python']
    expect(tokens[1]).toEqual value: "'", scopes: binScopes.concat ['punctuation.definition.string.begin.python']
    expect(tokens[2]).toEqual value: 't', scopes: binScopes
    expect(tokens[3]).toEqual value: 'é', scopes: binScopes.concat ['invalid.illegal.character-out-of-range.python']
    expect(tokens[4]).toEqual value: 'st', scopes: binScopes
    expect(tokens[5]).toEqual value: "'", scopes: binScopes.concat ['punctuation.definition.string.end.python']
# Triple-quoted block strings: the begin/end fences and each interior line
# are asserted per tokenized line, for both """ and ''' delimiters.
describe "docstrings", ->
  it "tokenizes them", ->
    lines = grammar.tokenizeLines '''
      """
       Bla bla bla "wow" what's this?
      """
    '''
    expect(lines[0][0]).toEqual value: '"""', scopes: ['source.python', 'string.quoted.double.block.python', 'punctuation.definition.string.begin.python']
    expect(lines[1][0]).toEqual value: ' Bla bla bla "wow" what\'s this?', scopes: ['source.python', 'string.quoted.double.block.python']
    expect(lines[2][0]).toEqual value: '"""', scopes: ['source.python', 'string.quoted.double.block.python', 'punctuation.definition.string.end.python']
    # Same fixture with single-quote fences.
    lines = grammar.tokenizeLines """
      '''
       Bla bla bla "wow" what's this?
      '''
    """
    expect(lines[0][0]).toEqual value: "'''", scopes: ['source.python', 'string.quoted.single.block.python', 'punctuation.definition.string.begin.python']
    expect(lines[1][0]).toEqual value: ' Bla bla bla "wow" what\'s this?', scopes: ['source.python', 'string.quoted.single.block.python']
    expect(lines[2][0]).toEqual value: "'''", scopes: ['source.python', 'string.quoted.single.block.python', 'punctuation.definition.string.end.python']
describe "string formatting", ->
describe "%-style formatting", ->
  # Every %-style directive should tokenize as exactly one
  # constant.other.placeholder token between the quote punctuation.
  checkPercentPlaceholder = (source, placeholder) ->
    {tokens} = grammar.tokenizeLine source
    expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
    expect(tokens[1]).toEqual value: placeholder, scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
    expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']

  it "tokenizes the conversion type", ->
    checkPercentPlaceholder '"%d"', '%d'

  it "tokenizes an optional mapping key", ->
    checkPercentPlaceholder '"%(key)x"', '%(key)x'

  it "tokenizes an optional conversion flag", ->
    checkPercentPlaceholder '"% F"', '% F'

  it "tokenizes an optional field width", ->
    checkPercentPlaceholder '"%11s"', '%11s'

  it "tokenizes * as the optional field width", ->
    checkPercentPlaceholder '"%*g"', '%*g'

  it "tokenizes an optional precision", ->
    checkPercentPlaceholder '"%.4r"', '%.4r'

  it "tokenizes * as the optional precision", ->
    checkPercentPlaceholder '"%.*%"', '%.*%'

  it "tokenizes an optional length modifier", ->
    checkPercentPlaceholder '"%Lo"', '%Lo'

  it "tokenizes complex formats", ->
    checkPercentPlaceholder '"%(key)#5.*hc"', '%(key)#5.*hc'
describe "{}-style formatting", ->
it "tokenizes the empty replacement field", ->
{tokens} = grammar.tokenizeLine '"{}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
# Each case below tokenizes to exactly three tokens: the opening quote,
# a single replacement-field placeholder, and the closing quote.
expectSinglePlaceholder = (line, field) ->
  {tokens} = grammar.tokenizeLine line
  inString = ['source.python', 'string.quoted.double.single-line.python']
  expect(tokens[0]).toEqual value: '"', scopes: inString.concat 'punctuation.definition.string.begin.python'
  expect(tokens[1]).toEqual value: field, scopes: inString.concat 'constant.other.placeholder.python'
  expect(tokens[2]).toEqual value: '"', scopes: inString.concat 'punctuation.definition.string.end.python'

it "tokenizes a number as the field name", ->
  expectSinglePlaceholder '"{1}"', '{1}'

it "tokenizes a variable name as the field name", ->
  expectSinglePlaceholder '"{key}"', '{key}'

it "tokenizes field name attributes", ->
  expectSinglePlaceholder '"{key.length}"', '{key.length}'
  expectSinglePlaceholder '"{4.width}"', '{4.width}'
  expectSinglePlaceholder '"{python2[\'3\']}"', '{python2[\'3\']}'
  expectSinglePlaceholder '"{2[4]}"', '{2[4]}'

it "tokenizes multiple field name attributes", ->
  expectSinglePlaceholder '"{nested.a[2][\'val\'].value}"', '{nested.a[2][\'val\'].value}'

it "tokenizes conversions", ->
  expectSinglePlaceholder '"{!r}"', '{!r}'
describe "format specifiers", ->
  # Every specifier case is a three-token string: open quote, one placeholder
  # token carrying the whole format spec, close quote.
  expectSpecifier = (line, field) ->
    {tokens} = grammar.tokenizeLine line
    inString = ['source.python', 'string.quoted.double.single-line.python']
    expect(tokens[0]).toEqual value: '"', scopes: inString.concat 'punctuation.definition.string.begin.python'
    expect(tokens[1]).toEqual value: field, scopes: inString.concat 'constant.other.placeholder.python'
    expect(tokens[2]).toEqual value: '"', scopes: inString.concat 'punctuation.definition.string.end.python'

  it "tokenizes alignment", ->
    expectSpecifier '"{:<}"', '{:<}'
    expectSpecifier '"{:a^}"', '{:a^}'

  it "tokenizes signs", ->
    expectSpecifier '"{:+}"', '{:+}'
    expectSpecifier '"{: }"', '{: }'

  it "tokenizes the alternate form indicator", ->
    expectSpecifier '"{:#}"', '{:#}'

  it "tokenizes 0", ->
    expectSpecifier '"{:0}"', '{:0}'

  it "tokenizes the width", ->
    expectSpecifier '"{:34}"', '{:34}'

  it "tokenizes the grouping option", ->
    expectSpecifier '"{:,}"', '{:,}'

  it "tokenizes the precision", ->
    expectSpecifier '"{:.5}"', '{:.5}'

  it "tokenizes the type", ->
    expectSpecifier '"{:b}"', '{:b}'
it "tokenizes nested replacement fields", ->
  {tokens} = grammar.tokenizeLine '"{:{align}-.{precision}%}"'
  inString = ['source.python', 'string.quoted.double.single-line.python']
  placeholder = inString.concat 'constant.other.placeholder.python'
  expect(tokens[0]).toEqual value: '"', scopes: inString.concat 'punctuation.definition.string.begin.python'
  expect(tokens[1]).toEqual value: '{:', scopes: placeholder
  # Nested fields get a second, stacked placeholder scope.
  expect(tokens[2]).toEqual value: '{align}', scopes: placeholder.concat 'constant.other.placeholder.python'
  expect(tokens[3]).toEqual value: '-.', scopes: placeholder
  expect(tokens[4]).toEqual value: '{precision}', scopes: placeholder.concat 'constant.other.placeholder.python'
  expect(tokens[5]).toEqual value: '%}', scopes: placeholder
  expect(tokens[6]).toEqual value: '"', scopes: inString.concat 'punctuation.definition.string.end.python'

it "tokenizes complex formats", ->
  # Field name + attribute + index + conversion + full format spec in one field.
  {tokens} = grammar.tokenizeLine '"{0.players[2]!a:2>-#01_.3d}"'
  inString = ['source.python', 'string.quoted.double.single-line.python']
  expect(tokens[0]).toEqual value: '"', scopes: inString.concat 'punctuation.definition.string.begin.python'
  expect(tokens[1]).toEqual value: '{0.players[2]!a:2>-#01_.3d}', scopes: inString.concat 'constant.other.placeholder.python'
  expect(tokens[2]).toEqual value: '"', scopes: inString.concat 'punctuation.definition.string.end.python'

it "tokenizes {{ and }} as escape characters and not formatters", ->
  {tokens} = grammar.tokenizeLine '"{{hello}}"'
  inString = ['source.python', 'string.quoted.double.single-line.python']
  escaped = inString.concat 'constant.character.escape.curly-bracket.python'
  expect(tokens[0]).toEqual value: '"', scopes: inString.concat 'punctuation.definition.string.begin.python'
  expect(tokens[1]).toEqual value: '{{', scopes: escaped
  expect(tokens[2]).toEqual value: 'hello', scopes: inString
  expect(tokens[3]).toEqual value: '}}', scopes: escaped
  expect(tokens[4]).toEqual value: '"', scopes: inString.concat 'punctuation.definition.string.end.python'
# `self`/`cls` receivers scope as the self language variable; any other
# receiver scopes as a plain object. The `.foo` part scopes identically.
expectPropertyAccess = (receiver, receiverScope) ->
  tokens = grammar.tokenizeLines "#{receiver}.foo"
  expect(tokens[0][0]).toEqual value: receiver, scopes: ['source.python', receiverScope]
  expect(tokens[0][1]).toEqual value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python']
  expect(tokens[0][2]).toEqual value: 'foo', scopes: ['source.python', 'variable.other.property.python']

it "tokenizes properties of self as self-type variables", ->
  expectPropertyAccess 'self', 'variable.language.self.python'

it "tokenizes cls as a self-type variable", ->
  expectPropertyAccess 'cls', 'variable.language.self.python'

it "tokenizes properties of a variable as variables", ->
  expectPropertyAccess 'bar', 'variable.other.object.python'
# Run the declarative grammar test fixtures (atom-grammar-test): each fixture
# file embeds its own scope assertions as specially formatted comments.
grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python.py')
grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python_functions.py')
grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python_lambdas.py')
grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python_typing.py')
describe "SQL highlighting", ->
  # language-sql must be active so the embedded-SQL injection grammar
  # (meta.embedded.sql scopes asserted below) is available.
  beforeEach ->
    waitsForPromise ->
      atom.packages.activatePackage('language-sql')
it "tokenizes SQL inline highlighting on blocks", ->
  delimsByScope =
    "string.quoted.double.block.sql.python": '"""'
    "string.quoted.single.block.sql.python": "'''"

  # FIX: iterate key/value pairs with `of`. The original used `in`, which is
  # CoffeeScript's ARRAY iteration — on an object it compiles to a loop over
  # `delimsByScope.length` (undefined), so the body never ran and this test
  # passed vacuously.
  for scope, delim of delimsByScope
    # FIX: build the source with explicit newlines. The original used a
    # multiline single-quoted string, which CoffeeScript folds onto one line
    # (newlines become spaces), so tokenizeLines would have produced a single
    # row while the assertions below index rows 0-3.
    tokens = grammar.tokenizeLines("#{delim}\nSELECT bar\nFROM foo\n#{delim}")

    expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python']
    expect(tokens[1][0]).toEqual value: 'SELECT', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']
    expect(tokens[1][1]).toEqual value: ' bar', scopes: ['source.python', scope, 'meta.embedded.sql']
    expect(tokens[2][0]).toEqual value: 'FROM', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']
    # FIX: the original read `toEqual value ' foo'` — the missing colon made it
    # a call expression rather than an {value, scopes} matcher object (latent
    # only because the loop body never executed).
    expect(tokens[2][1]).toEqual value: ' foo', scopes: ['source.python', scope, 'meta.embedded.sql']
    expect(tokens[3][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python']
it "tokenizes SQL inline highlighting on blocks with a CTE", ->
  # Note that these scopes do not contain .sql because we can't definitively tell
  # if the string contains SQL or not
  delimsByScope =
    "string.quoted.double.block.python": '"""'
    "string.quoted.single.block.python": "'''"

  for scope, delim of delimsByScope
    # NOTE(review): the assertions below skip tokens[6] and expect SELECT at
    # tokens[7], which only holds if a blank line separates ")" from
    # "SELECT COUNT(*)" in this heredoc — confirm against the committed file;
    # blank lines inside block strings are easily lost when copied around.
    tokens = grammar.tokenizeLines("""
      #{delim}
      WITH example_cte AS (
      SELECT bar
      FROM foo
      GROUP BY bar
      )
      SELECT COUNT(*)
      FROM example_cte
      #{delim}
    """)

    expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python']
    expect(tokens[1][0]).toEqual value: 'WITH', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']
    expect(tokens[1][1]).toEqual value: ' example_cte ', scopes: ['source.python', scope, 'meta.embedded.sql']
    expect(tokens[1][2]).toEqual value: 'AS', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.alias.sql']
    expect(tokens[1][3]).toEqual value: ' ', scopes: ['source.python', scope, 'meta.embedded.sql']
    expect(tokens[1][4]).toEqual value: '(', scopes: ['source.python', scope, 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.begin.sql']
    expect(tokens[2][0]).toEqual value: 'SELECT', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']
    expect(tokens[2][1]).toEqual value: ' bar', scopes: ['source.python', scope, 'meta.embedded.sql']
    expect(tokens[3][0]).toEqual value: 'FROM', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']
    expect(tokens[3][1]).toEqual value: ' foo', scopes: ['source.python', scope, 'meta.embedded.sql']
    expect(tokens[4][0]).toEqual value: 'GROUP BY', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']
    expect(tokens[4][1]).toEqual value: ' bar', scopes: ['source.python', scope, 'meta.embedded.sql']
    expect(tokens[5][0]).toEqual value: ')', scopes: ['source.python', scope, 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql']
    expect(tokens[7][0]).toEqual value: 'SELECT', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']
    expect(tokens[8][0]).toEqual value: 'FROM', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']
    expect(tokens[9][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python']
it "tokenizes SQL inline highlighting on single line with a CTE", ->
  {tokens} = grammar.tokenizeLine('\'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte\'')
  # Single-line SQL strings DO get the .sql scope, unlike the block variant.
  inString = ['source.python', 'string.quoted.single.single-line.sql.python']
  embedded = inString.concat 'meta.embedded.sql'
  expect(tokens[0]).toEqual value: '\'', scopes: inString.concat 'punctuation.definition.string.begin.python'
  expect(tokens[1]).toEqual value: 'WITH', scopes: embedded.concat 'keyword.other.DML.sql'
  expect(tokens[2]).toEqual value: ' example_cte ', scopes: embedded
  expect(tokens[3]).toEqual value: 'AS', scopes: embedded.concat 'keyword.other.alias.sql'
  expect(tokens[4]).toEqual value: ' ', scopes: embedded
  expect(tokens[5]).toEqual value: '(', scopes: embedded.concat 'punctuation.definition.section.bracket.round.begin.sql'
  expect(tokens[6]).toEqual value: 'SELECT', scopes: embedded.concat 'keyword.other.DML.sql'
  expect(tokens[7]).toEqual value: ' bar ', scopes: embedded
  expect(tokens[8]).toEqual value: 'FROM', scopes: embedded.concat 'keyword.other.DML.sql'
  expect(tokens[9]).toEqual value: ' foo', scopes: embedded
  expect(tokens[10]).toEqual value: ')', scopes: embedded.concat 'punctuation.definition.section.bracket.round.end.sql'
  expect(tokens[11]).toEqual value: ' ', scopes: embedded
  expect(tokens[12]).toEqual value: 'SELECT', scopes: embedded.concat 'keyword.other.DML.sql'
  expect(tokens[13]).toEqual value: ' ', scopes: embedded
  expect(tokens[14]).toEqual value: 'COUNT', scopes: embedded.concat 'support.function.aggregate.sql'
  expect(tokens[15]).toEqual value: '(', scopes: embedded.concat 'punctuation.definition.section.bracket.round.begin.sql'
  expect(tokens[16]).toEqual value: '*', scopes: embedded.concat 'keyword.operator.star.sql'
  expect(tokens[17]).toEqual value: ')', scopes: embedded.concat 'punctuation.definition.section.bracket.round.end.sql'
  expect(tokens[18]).toEqual value: ' ', scopes: embedded
  expect(tokens[19]).toEqual value: 'FROM', scopes: embedded.concat 'keyword.other.DML.sql'
  expect(tokens[20]).toEqual value: ' example_cte', scopes: embedded
  expect(tokens[21]).toEqual value: '\'', scopes: inString.concat 'punctuation.definition.string.end.python'
it "tokenizes Python escape characters and formatting specifiers in SQL strings", ->
  {tokens} = grammar.tokenizeLine('"INSERT INTO url (image_uri) VALUES (\\\'%s\\\');" % values')
  sqlString = ['source.python', 'string.quoted.double.single-line.sql.python']
  embedded = sqlString.concat 'meta.embedded.sql'
  expect(tokens[0]).toEqual value: '"', scopes: sqlString.concat 'punctuation.definition.string.begin.python'
  # Python-level escapes and %-placeholders keep their Python scopes even
  # inside the embedded SQL region.
  expect(tokens[10]).toEqual value: '\\\'', scopes: embedded.concat 'constant.character.escape.single-quote.python'
  expect(tokens[11]).toEqual value: '%s', scopes: embedded.concat 'constant.other.placeholder.python'
  expect(tokens[12]).toEqual value: '\\\'', scopes: embedded.concat 'constant.character.escape.single-quote.python'
  expect(tokens[13]).toEqual value: ')', scopes: embedded.concat 'punctuation.definition.section.bracket.round.end.sql'
  expect(tokens[15]).toEqual value: '"', scopes: sqlString.concat 'punctuation.definition.string.end.python'
  expect(tokens[17]).toEqual value: '%', scopes: ['source.python', 'keyword.operator.arithmetic.python']

it "recognizes DELETE as an HTTP method", ->
  # DELETE must not trigger the SQL injection grammar in an HTTP-style string.
  {tokens} = grammar.tokenizeLine('"DELETE /api/v1/endpoint"')
  plainString = ['source.python', 'string.quoted.double.single-line.python']
  expect(tokens[0]).toEqual value: '"', scopes: plainString.concat 'punctuation.definition.string.begin.python'
  expect(tokens[1]).toEqual value: 'DELETE /api/v1/endpoint', scopes: plainString
  expect(tokens[2]).toEqual value: '"', scopes: plainString.concat 'punctuation.definition.string.end.python'

View File

@ -0,0 +1,844 @@
const path = require('path');
const grammarTest = require('atom-grammar-test');
describe("Python grammar", function() {
// Grammar under test; assigned once language-python activates in beforeEach.
let grammar = null;
beforeEach(function() {
  // These specs assert TextMate scopes, so force the TextMate parser.
  atom.config.set('core.useTreeSitterParsers', false);
  waitsForPromise(() => atom.packages.activatePackage("language-python"));
  runs(() => grammar = atom.grammars.grammarForScopeName("source.python"));
});
it("recognises shebang on firstline", function() {
  // Both "#!" and "#! " shebang forms must match the first-line regex.
  for (const line of ["#!/usr/bin/env python", "#! /usr/bin/env python"]) {
    expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull();
  }
});

it("parses the grammar", function() {
  expect(grammar).toBeDefined();
  expect(grammar.scopeName).toBe("source.python");
});

// Both yield forms tokenize as a single control-statement keyword.
for (const keyword of ['yield', 'yield from']) {
  it(`tokenizes \`${keyword}\``, function() {
    const {tokens} = grammar.tokenizeLine(`${keyword} v`);
    expect(tokens[0]).toEqual({value: keyword, scopes: ['source.python', 'keyword.control.statement.python']});
  });
}
it("tokenizes multi-line strings", function() {
  const tokens = grammar.tokenizeLines('"1\\\n2"');
  const stringScope = ['source.python', 'string.quoted.double.single-line.python'];
  // Row 0: open quote, body, backslash line continuation — and nothing more.
  // Row 1: continuation body, close quote — and nothing more.
  const expectedRows = [
    [
      ['"', stringScope.concat('punctuation.definition.string.begin.python')],
      ['1', stringScope],
      ['\\', stringScope.concat('constant.character.escape.newline.python')]
    ],
    [
      ['2', stringScope],
      ['"', stringScope.concat('punctuation.definition.string.end.python')]
    ]
  ];
  expectedRows.forEach((expected, row) => {
    expected.forEach(([value, scopes], col) => {
      expect(tokens[row][col].value).toBe(value);
      expect(tokens[row][col].scopes).toEqual(scopes);
    });
    expect(tokens[row][expected.length]).not.toBeDefined();
  });
});
// Data-driven variants: every prefix/quote/metacharacter combination must
// terminate the raw (regex) string at the closing quote instead of treating
// the unclosed regex group/class as continuing past it. Each tuple is
// [description fragment, string prefix, quote char, string scope, metachar].
const rawStringTerminationCases = [
  ['a single-quoted', 'r', "'", 'string.quoted.single.single-line.raw-regex.python', 'parenthesis'],
  ['a single-quoted', 'r', "'", 'string.quoted.single.single-line.raw-regex.python', 'bracket'],
  ['a double-quoted', 'r', '"', 'string.quoted.double.single-line.raw-regex.python', 'parenthesis'],
  ['a double-quoted', 'r', '"', 'string.quoted.double.single-line.raw-regex.python', 'bracket'],
  ['a unicode single-quoted', 'ur', "'", 'string.quoted.single.single-line.unicode-raw-regex.python', 'parenthesis'],
  ['a unicode single-quoted', 'ur', "'", 'string.quoted.single.single-line.unicode-raw-regex.python', 'bracket'],
  ['a unicode double-quoted', 'ur', '"', 'string.quoted.double.single-line.unicode-raw-regex.python', 'parenthesis']
];

for (const [kind, prefix, quote, stringScope, metachar] of rawStringTerminationCases) {
  it(`terminates ${kind} raw string containing opening ${metachar} at closing quote`, function() {
    const open = metachar === 'parenthesis' ? '(' : '[';
    const openScopes = metachar === 'parenthesis'
      ? ['meta.group.regexp', 'punctuation.definition.group.regexp']
      : ['constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp'];
    const tokens = grammar.tokenizeLines(`${prefix}${quote}%d${open}${quote} #foo`);
    // Expected token stream: prefix, open quote, placeholder, regex
    // metacharacter, CLOSE QUOTE (the point of the test), then a trailing
    // comment outside the string.
    const expected = [
      [prefix, [stringScope, 'storage.type.string.python']],
      [quote, [stringScope, 'punctuation.definition.string.begin.python']],
      ['%d', [stringScope, 'constant.other.placeholder.python']],
      [open, [stringScope, ...openScopes]],
      [quote, [stringScope, 'punctuation.definition.string.end.python']],
      [' ', []],
      ['#', ['comment.line.number-sign.python', 'punctuation.definition.comment.python']],
      ['foo', ['comment.line.number-sign.python']]
    ];
    expected.forEach(([value, scopes], i) => {
      expect(tokens[0][i].value).toBe(value);
      expect(tokens[0][i].scopes).toEqual(['source.python', ...scopes]);
    });
  });
}
// Same termination guard again: an unclosed character-class "[" inside a
// ur"" string must not keep the string open past the closing quote.
it("terminates a unicode double-quoted raw string containing opening bracket at closing quote", function() {
const tokens = grammar.tokenizeLines('ur"%d[" #foo');
expect(tokens[0][0].value).toBe('ur');
expect(tokens[0][0].scopes).toEqual(['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'storage.type.string.python']);
expect(tokens[0][1].value).toBe('"');
expect(tokens[0][1].scopes).toEqual(['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python']);
expect(tokens[0][2].value).toBe('%d');
expect(tokens[0][2].scopes).toEqual(['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python']);
expect(tokens[0][3].value).toBe('[');
expect(tokens[0][3].scopes).toEqual(['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']);
expect(tokens[0][4].value).toBe('"');
expect(tokens[0][4].scopes).toEqual(['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python']);
expect(tokens[0][5].value).toBe(' ');
expect(tokens[0][5].scopes).toEqual(['source.python']);
expect(tokens[0][6].value).toBe('#');
expect(tokens[0][6].scopes).toEqual(['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']);
expect(tokens[0][7].value).toBe('foo');
expect(tokens[0][7].scopes).toEqual(['source.python', 'comment.line.number-sign.python']);
});
// Tokenizes foo[i[0]][j[0]]. Per these expectations the first subscript
// (and its nested i[0]) is scoped as meta.item-access, while the second
// "[j[0]]" — coming right after a "]["] sequence — is scoped as a list
// literal (meta.structure.list) containing its own item-access. The spec
// pins that the item-access scope terminates at the first closing bracket
// pair instead of swallowing the rest of the line.
it("terminates referencing an item in a list variable after a sequence of a closing and opening bracket", function() {
const tokens = grammar.tokenizeLines('foo[i[0]][j[0]]');
expect(tokens[0][0].value).toBe('foo');
expect(tokens[0][0].scopes).toEqual(['source.python', 'meta.item-access.python']);
expect(tokens[0][1].value).toBe('[');
expect(tokens[0][1].scopes).toEqual(['source.python', 'meta.item-access.python', 'punctuation.definition.arguments.begin.python']);
expect(tokens[0][2].value).toBe('i');
expect(tokens[0][2].scopes).toEqual(['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python']);
expect(tokens[0][3].value).toBe('[');
expect(tokens[0][3].scopes).toEqual(['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python', 'punctuation.definition.arguments.begin.python']);
expect(tokens[0][4].value).toBe('0');
expect(tokens[0][4].scopes).toEqual(['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'constant.numeric.integer.decimal.python']);
expect(tokens[0][5].value).toBe(']');
expect(tokens[0][5].scopes).toEqual(['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python', 'punctuation.definition.arguments.end.python']);
expect(tokens[0][6].value).toBe(']');
expect(tokens[0][6].scopes).toEqual(['source.python', 'meta.item-access.python', 'punctuation.definition.arguments.end.python']);
// From here on the grammar treats "[j[0]]" as a list literal, not a
// continued item access.
expect(tokens[0][7].value).toBe('[');
expect(tokens[0][7].scopes).toEqual(['source.python', 'meta.structure.list.python', 'punctuation.definition.list.begin.python']);
expect(tokens[0][8].value).toBe('j');
expect(tokens[0][8].scopes).toEqual(['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python']);
expect(tokens[0][9].value).toBe('[');
expect(tokens[0][9].scopes).toEqual(['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python', 'punctuation.definition.arguments.begin.python']);
expect(tokens[0][10].value).toBe('0');
expect(tokens[0][10].scopes).toEqual(['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'constant.numeric.integer.decimal.python']);
expect(tokens[0][11].value).toBe(']');
expect(tokens[0][11].scopes).toEqual(['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python', 'punctuation.definition.arguments.end.python']);
expect(tokens[0][12].value).toBe(']');
expect(tokens[0][12].scopes).toEqual(['source.python', 'meta.structure.list.python', 'punctuation.definition.list.end.python']);
});
it("tokenizes a hex escape inside a string", function() {
  // Upper- and lower-case hex digits must both be scoped as a hex escape.
  for (const escape of ['\\x5A', '\\x9f']) {
    const tokens = grammar.tokenizeLines(`"${escape}"`);
    expect(tokens[0][0].value).toBe('"');
    expect(tokens[0][0].scopes).toEqual(['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']);
    expect(tokens[0][1].value).toBe(escape);
    expect(tokens[0][1].scopes).toEqual(['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.hex.python']);
  }
});
// Scope assertions for formatted string literals (PEP 498). The base scope
// asserted throughout is string.quoted.single.single-line.format.python.
describe("f-strings", function() {
it("tokenizes them", function() {
const {tokens} = grammar.tokenizeLine("f'hello'");
expect(tokens[0]).toEqual({value: 'f', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'storage.type.string.python']});
expect(tokens[1]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.begin.python']});
expect(tokens[2]).toEqual({value: 'hello', scopes: ['source.python', "string.quoted.single.single-line.format.python"]});
expect(tokens[3]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python']});
});
// Doubled braces are literal braces in f-strings, scoped as escapes.
it("tokenizes {{ and }} as escape characters", function() {
const {tokens} = grammar.tokenizeLine("f'he}}l{{lo'");
expect(tokens[0]).toEqual({value: 'f', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'storage.type.string.python']});
expect(tokens[1]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.begin.python']});
expect(tokens[2]).toEqual({value: 'he', scopes: ['source.python', "string.quoted.single.single-line.format.python"]});
expect(tokens[3]).toEqual({value: '}}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'constant.character.escape.curly-bracket.python']});
expect(tokens[4]).toEqual({value: 'l', scopes: ['source.python', "string.quoted.single.single-line.format.python"]});
expect(tokens[5]).toEqual({value: '{{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'constant.character.escape.curly-bracket.python']});
expect(tokens[6]).toEqual({value: 'lo', scopes: ['source.python', "string.quoted.single.single-line.format.python"]});
expect(tokens[7]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python']});
});
// A single "}" with no matching "{" is a runtime error in Python f-strings,
// so the grammar marks it invalid.
it("tokenizes unmatched closing curly brackets as invalid", function() {
const {tokens} = grammar.tokenizeLine("f'he}llo'");
expect(tokens[0]).toEqual({value: 'f', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'storage.type.string.python']});
expect(tokens[1]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.begin.python']});
expect(tokens[2]).toEqual({value: 'he', scopes: ['source.python', "string.quoted.single.single-line.format.python"]});
expect(tokens[3]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'invalid.illegal.closing-curly-bracket.python']});
expect(tokens[4]).toEqual({value: 'llo', scopes: ['source.python', "string.quoted.single.single-line.format.python"]});
expect(tokens[5]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python']});
});
// Replacement fields {...}: the interior is scoped meta.embedded.python and
// tokenized as ordinary Python.
describe("in expressions", function() {
it("tokenizes variables", function() {
const {tokens} = grammar.tokenizeLine("f'{abc}'");
expect(tokens[2]).toEqual({value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']});
expect(tokens[3]).toEqual({value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']});
expect(tokens[4]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
});
it("tokenizes arithmetic", function() {
const {tokens} = grammar.tokenizeLine("f'{5 - 3}'");
expect(tokens[2]).toEqual({value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']});
expect(tokens[3]).toEqual({value: '5', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'constant.numeric.integer.decimal.python']});
expect(tokens[5]).toEqual({value: '-', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'keyword.operator.arithmetic.python']});
expect(tokens[7]).toEqual({value: '3', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'constant.numeric.integer.decimal.python']});
expect(tokens[8]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
});
// Nested strings inside the replacement field get their own string scopes.
it("tokenizes function and method calls", function() {
const {tokens} = grammar.tokenizeLine("f'{name.decode(\"utf-8\").lower()}'");
expect(tokens[2]).toEqual({value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']});
expect(tokens[3]).toEqual({value: 'name', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'variable.other.object.python']});
expect(tokens[4]).toEqual({value: '.', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.separator.method.period.python']});
expect(tokens[5]).toEqual({value: 'decode', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'entity.name.function.python']});
expect(tokens[6]).toEqual({value: '(', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.begin.bracket.round.python']});
expect(tokens[7]).toEqual({value: '"', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'meta.method-call.arguments.python', "string.quoted.double.single-line.python", 'punctuation.definition.string.begin.python']});
expect(tokens[8]).toEqual({value: 'utf-8', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'meta.method-call.arguments.python', "string.quoted.double.single-line.python"]});
expect(tokens[9]).toEqual({value: '"', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'meta.method-call.arguments.python', "string.quoted.double.single-line.python", 'punctuation.definition.string.end.python']});
expect(tokens[10]).toEqual({value: ')', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.end.bracket.round.python']});
expect(tokens[11]).toEqual({value: '.', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.separator.method.period.python']});
expect(tokens[12]).toEqual({value: 'lower', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'entity.name.function.python']});
expect(tokens[13]).toEqual({value: '(', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.begin.bracket.round.python']});
expect(tokens[14]).toEqual({value: ')', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.end.bracket.round.python']});
expect(tokens[15]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
});
// "!r" / "!s" / "!a" conversions are scoped as placeholders, not as
// embedded Python.
it("tokenizes conversion flags", function() {
const {tokens} = grammar.tokenizeLine("f'{abc!r}'");
expect(tokens[2]).toEqual({value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']});
expect(tokens[3]).toEqual({value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']});
expect(tokens[4]).toEqual({value: '!r', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python']});
expect(tokens[5]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
});
it("tokenizes format specifiers", function() {
const {tokens} = grammar.tokenizeLine("f'{abc:^d}'");
expect(tokens[2]).toEqual({value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']});
expect(tokens[3]).toEqual({value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']});
expect(tokens[4]).toEqual({value: ':^d', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python']});
expect(tokens[5]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
});
// A replacement field inside a format spec ("{align}") nests a second
// placeholder scope inside the outer one.
it("tokenizes nested replacement fields in top-level format specifiers", function() {
const {tokens} = grammar.tokenizeLine("f'{abc:{align}d}'");
expect(tokens[2]).toEqual({value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']});
expect(tokens[3]).toEqual({value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']});
expect(tokens[4]).toEqual({value: ':', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python']});
expect(tokens[5]).toEqual({value: '{align}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python', 'constant.other.placeholder.python']});
expect(tokens[6]).toEqual({value: 'd', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python']});
expect(tokens[7]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
});
// Backslashes are not allowed inside f-string replacement fields
// (pre-PEP 701 rule), so the grammar flags them.
it("tokenizes backslashes as invalid", function() {
const {tokens} = grammar.tokenizeLine("f'{ab\\n}'");
expect(tokens[2]).toEqual({value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']});
expect(tokens[3]).toEqual({value: 'ab', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']});
expect(tokens[4]).toEqual({value: '\\', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'invalid.illegal.backslash.python']});
expect(tokens[6]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
});
});
});
describe("binary strings", function() {
  // Base scope shared by every token of a b'' literal.
  const binaryString = 'string.quoted.single.single-line.binary.python';

  it("tokenizes them", function() {
    const {tokens} = grammar.tokenizeLine("b'test'");
    expect(tokens[0]).toEqual({value: 'b', scopes: ['source.python', binaryString, 'storage.type.string.python']});
    expect(tokens[1]).toEqual({value: "'", scopes: ['source.python', binaryString, 'punctuation.definition.string.begin.python']});
    expect(tokens[2]).toEqual({value: 'test', scopes: ['source.python', binaryString]});
    expect(tokens[3]).toEqual({value: "'", scopes: ['source.python', binaryString, 'punctuation.definition.string.end.python']});
  });

  it("tokenizes invalid characters", function() {
    // Bytes literals only admit ASCII, so "é" is scoped out-of-range.
    const {tokens} = grammar.tokenizeLine("b'tést'");
    expect(tokens[0]).toEqual({value: 'b', scopes: ['source.python', binaryString, 'storage.type.string.python']});
    expect(tokens[1]).toEqual({value: "'", scopes: ['source.python', binaryString, 'punctuation.definition.string.begin.python']});
    expect(tokens[2]).toEqual({value: 't', scopes: ['source.python', binaryString]});
    expect(tokens[3]).toEqual({value: 'é', scopes: ['source.python', binaryString, 'invalid.illegal.character-out-of-range.python']});
    expect(tokens[4]).toEqual({value: 'st', scopes: ['source.python', binaryString]});
    expect(tokens[5]).toEqual({value: "'", scopes: ['source.python', binaryString, 'punctuation.definition.string.end.python']});
  });
});
// Triple-quoted block strings: the whole interior (including quotes of the
// other kind and apostrophes) stays inside one block-string scope.
// NOTE(review): the template literals below use trailing "\" line
// continuations and significant leading spaces — the fixture text must be
// preserved byte-for-byte.
describe("docstrings", () => it("tokenizes them", function() {
let lines = grammar.tokenizeLines(`\
"""
  Bla bla bla "wow" what's this?
"""\
`
);
expect(lines[0][0]).toEqual({value: '"""', scopes: ['source.python', 'string.quoted.double.block.python', 'punctuation.definition.string.begin.python']});
expect(lines[1][0]).toEqual({value: '  Bla bla bla "wow" what\'s this?', scopes: ['source.python', 'string.quoted.double.block.python']});
expect(lines[2][0]).toEqual({value: '"""', scopes: ['source.python', 'string.quoted.double.block.python', 'punctuation.definition.string.end.python']});
// Same fixture with single-quoted triple quotes.
lines = grammar.tokenizeLines(`\
'''
  Bla bla bla "wow" what's this?
'''\
`
);
expect(lines[0][0]).toEqual({value: "'''", scopes: ['source.python', 'string.quoted.single.block.python', 'punctuation.definition.string.begin.python']});
expect(lines[1][0]).toEqual({value: '  Bla bla bla "wow" what\'s this?', scopes: ['source.python', 'string.quoted.single.block.python']});
expect(lines[2][0]).toEqual({value: "'''", scopes: ['source.python', 'string.quoted.single.block.python', 'punctuation.definition.string.end.python']});
}));
describe("string formatting", function() {
describe("%-style formatting", function() {
  // Every spec below asserts the same three-token shape: opening quote,
  // one placeholder token covering the entire %-format, closing quote.
  // `source` is always the placeholder wrapped in double quotes, so the
  // expected placeholder text is source.slice(1, -1).
  const doubleQuoted = 'string.quoted.double.single-line.python';
  const expectPlaceholder = function(source) {
    const {tokens} = grammar.tokenizeLine(source);
    expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', doubleQuoted, 'punctuation.definition.string.begin.python']});
    expect(tokens[1]).toEqual({value: source.slice(1, -1), scopes: ['source.python', doubleQuoted, 'constant.other.placeholder.python']});
    expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', doubleQuoted, 'punctuation.definition.string.end.python']});
  };

  it("tokenizes the conversion type", () => expectPlaceholder('"%d"'));
  it("tokenizes an optional mapping key", () => expectPlaceholder('"%(key)x"'));
  it("tokenizes an optional conversion flag", () => expectPlaceholder('"% F"'));
  it("tokenizes an optional field width", () => expectPlaceholder('"%11s"'));
  it("tokenizes * as the optional field width", () => expectPlaceholder('"%*g"'));
  it("tokenizes an optional precision", () => expectPlaceholder('"%.4r"'));
  it("tokenizes * as the optional precision", () => expectPlaceholder('"%.*%"'));
  it("tokenizes an optional length modifier", () => expectPlaceholder('"%Lo"'));
  it("tokenizes complex formats", () => expectPlaceholder('"%(key)#5.*hc"'));
});
describe("{}-style formatting", function() {
it("tokenizes the empty replacement field", function() {
  // "{}" is one placeholder token between the two quote tokens.
  const dq = 'string.quoted.double.single-line.python';
  const {tokens} = grammar.tokenizeLine('"{}"');
  expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', dq, 'punctuation.definition.string.begin.python']});
  expect(tokens[1]).toEqual({value: '{}', scopes: ['source.python', dq, 'constant.other.placeholder.python']});
  expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', dq, 'punctuation.definition.string.end.python']});
});
it("tokenizes a number as the field name", function() {
  // Positional field "{1}" is a single placeholder token.
  const dq = 'string.quoted.double.single-line.python';
  const {tokens} = grammar.tokenizeLine('"{1}"');
  expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', dq, 'punctuation.definition.string.begin.python']});
  expect(tokens[1]).toEqual({value: '{1}', scopes: ['source.python', dq, 'constant.other.placeholder.python']});
  expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', dq, 'punctuation.definition.string.end.python']});
});
it("tokenizes a variable name as the field name", function() {
  // Keyword field "{key}" is a single placeholder token.
  const dq = 'string.quoted.double.single-line.python';
  const {tokens} = grammar.tokenizeLine('"{key}"');
  expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', dq, 'punctuation.definition.string.begin.python']});
  expect(tokens[1]).toEqual({value: '{key}', scopes: ['source.python', dq, 'constant.other.placeholder.python']});
  expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', dq, 'punctuation.definition.string.end.python']});
});
it("tokenizes field name attributes", function() {
  // Attribute access, numeric field names, and string/int subscripts must
  // all stay inside one placeholder token. Each source is the placeholder
  // wrapped in double quotes, so the expected value is source.slice(1, -1).
  const dq = 'string.quoted.double.single-line.python';
  const sources = ['"{key.length}"', '"{4.width}"', '"{python2[\'3\']}"', '"{2[4]}"'];
  for (const source of sources) {
    const {tokens} = grammar.tokenizeLine(source);
    expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', dq, 'punctuation.definition.string.begin.python']});
    expect(tokens[1]).toEqual({value: source.slice(1, -1), scopes: ['source.python', dq, 'constant.other.placeholder.python']});
    expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', dq, 'punctuation.definition.string.end.python']});
  }
});
it("tokenizes multiple field name attributes", function() {
  // A chain of attribute and subscript accesses is still one placeholder.
  const dq = 'string.quoted.double.single-line.python';
  const {tokens} = grammar.tokenizeLine('"{nested.a[2][\'val\'].value}"');
  expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', dq, 'punctuation.definition.string.begin.python']});
  expect(tokens[1]).toEqual({value: '{nested.a[2][\'val\'].value}', scopes: ['source.python', dq, 'constant.other.placeholder.python']});
  expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', dq, 'punctuation.definition.string.end.python']});
});
it("tokenizes conversions", function() {
  // A conversion flag with no field name ("{!r}") is one placeholder token.
  const dq = 'string.quoted.double.single-line.python';
  const {tokens} = grammar.tokenizeLine('"{!r}"');
  expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', dq, 'punctuation.definition.string.begin.python']});
  expect(tokens[1]).toEqual({value: '{!r}', scopes: ['source.python', dq, 'constant.other.placeholder.python']});
  expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', dq, 'punctuation.definition.string.end.python']});
});
describe("format specifiers", function() {
  // Scope chains shared by every double-quoted single-line assertion below.
  const stringScopes = ['source.python', 'string.quoted.double.single-line.python'];
  const beginScopes = [...stringScopes, 'punctuation.definition.string.begin.python'];
  const endScopes = [...stringScopes, 'punctuation.definition.string.end.python'];
  const placeholderScopes = [...stringScopes, 'constant.other.placeholder.python'];
  // Asserts that `"<placeholder>"` tokenizes as an opening quote, a single
  // placeholder token, and a closing quote.
  const expectPlaceholder = function(placeholder) {
    const {tokens} = grammar.tokenizeLine(`"${placeholder}"`);
    expect(tokens[0]).toEqual({value: '"', scopes: beginScopes});
    expect(tokens[1]).toEqual({value: placeholder, scopes: placeholderScopes});
    expect(tokens[2]).toEqual({value: '"', scopes: endScopes});
  };
  it("tokenizes alignment", function() {
    expectPlaceholder('{:<}');
    expectPlaceholder('{:a^}');
  });
  it("tokenizes signs", function() {
    expectPlaceholder('{:+}');
    expectPlaceholder('{: }');
  });
  it("tokenizes the alternate form indicator", function() {
    expectPlaceholder('{:#}');
  });
  it("tokenizes 0", function() {
    expectPlaceholder('{:0}');
  });
  it("tokenizes the width", function() {
    expectPlaceholder('{:34}');
  });
  it("tokenizes the grouping option", function() {
    expectPlaceholder('{:,}');
  });
  it("tokenizes the precision", function() {
    expectPlaceholder('{:.5}');
  });
  it("tokenizes the type", function() {
    expectPlaceholder('{:b}');
  });
  it("tokenizes nested replacement fields", function() {
    // An inner `{...}` inside a format spec gets a second, nested placeholder scope.
    const nestedScopes = [...placeholderScopes, 'constant.other.placeholder.python'];
    const {tokens} = grammar.tokenizeLine('"{:{align}-.{precision}%}"');
    expect(tokens[0]).toEqual({value: '"', scopes: beginScopes});
    expect(tokens[1]).toEqual({value: '{:', scopes: placeholderScopes});
    expect(tokens[2]).toEqual({value: '{align}', scopes: nestedScopes});
    expect(tokens[3]).toEqual({value: '-.', scopes: placeholderScopes});
    expect(tokens[4]).toEqual({value: '{precision}', scopes: nestedScopes});
    expect(tokens[5]).toEqual({value: '%}', scopes: placeholderScopes});
    expect(tokens[6]).toEqual({value: '"', scopes: endScopes});
  });
});
it("tokenizes complex formats", function() {
  // Field name + attribute + index + conversion + full format spec in one field.
  const stringScopes = ['source.python', 'string.quoted.double.single-line.python'];
  const {tokens} = grammar.tokenizeLine('"{0.players[2]!a:2>-#01_.3d}"');
  expect(tokens[0]).toEqual({value: '"', scopes: [...stringScopes, 'punctuation.definition.string.begin.python']});
  expect(tokens[1]).toEqual({value: '{0.players[2]!a:2>-#01_.3d}', scopes: [...stringScopes, 'constant.other.placeholder.python']});
  expect(tokens[2]).toEqual({value: '"', scopes: [...stringScopes, 'punctuation.definition.string.end.python']});
});
it("tokenizes {{ and }} as escape characters and not formatters", function() {
  const stringScopes = ['source.python', 'string.quoted.double.single-line.python'];
  const escapeScopes = [...stringScopes, 'constant.character.escape.curly-bracket.python'];
  const {tokens} = grammar.tokenizeLine('"{{hello}}"');
  expect(tokens[0]).toEqual({value: '"', scopes: [...stringScopes, 'punctuation.definition.string.begin.python']});
  expect(tokens[1]).toEqual({value: '{{', scopes: escapeScopes});
  // The braces are escapes, so the text between them is plain string content.
  expect(tokens[2]).toEqual({value: 'hello', scopes: stringScopes});
  expect(tokens[3]).toEqual({value: '}}', scopes: escapeScopes});
  expect(tokens[4]).toEqual({value: '"', scopes: [...stringScopes, 'punctuation.definition.string.end.python']});
});
});
});
it("tokenizes properties of self as self-type variables", function() {
  const [line] = grammar.tokenizeLines('self.foo');
  expect(line[0]).toEqual({value: 'self', scopes: ['source.python', 'variable.language.self.python']});
  expect(line[1]).toEqual({value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python']});
  expect(line[2]).toEqual({value: 'foo', scopes: ['source.python', 'variable.other.property.python']});
});
it("tokenizes cls as a self-type variable", function() {
  const [line] = grammar.tokenizeLines('cls.foo');
  expect(line[0]).toEqual({value: 'cls', scopes: ['source.python', 'variable.language.self.python']});
  expect(line[1]).toEqual({value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python']});
  expect(line[2]).toEqual({value: 'foo', scopes: ['source.python', 'variable.other.property.python']});
});
it("tokenizes properties of a variable as variables", function() {
  // Unlike self/cls, an ordinary receiver is scoped as a plain object variable.
  const [line] = grammar.tokenizeLines('bar.foo');
  expect(line[0]).toEqual({value: 'bar', scopes: ['source.python', 'variable.other.object.python']});
  expect(line[1]).toEqual({value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python']});
  expect(line[2]).toEqual({value: 'foo', scopes: ['source.python', 'variable.other.property.python']});
});
// Run the declarative grammar test fixtures.
const grammarFixtures = [
  'syntax_test_python.py',
  'syntax_test_python_functions.py',
  'syntax_test_python_lambdas.py',
  'syntax_test_python_typing.py'
];
for (const fixture of grammarFixtures) {
  grammarTest(path.join(__dirname, 'fixtures/grammar', fixture));
}
describe("SQL highlighting", function() {
  beforeEach(() => waitsForPromise(() => atom.packages.activatePackage('language-sql')));
  it("tokenizes SQL inline highlighting on blocks", function() {
    const delimsByScope = {
      "string.quoted.double.block.sql.python": '"""',
      "string.quoted.single.block.sql.python": "'''"
    };
    // FIX: the previous loop was `for (let delim = 0; delim < delimsByScope.length; ...)`,
    // but `delimsByScope` is a plain object with no `length`, so the body never
    // ran and the spec passed vacuously. Iterate the scope keys instead, as the
    // sibling CTE spec does.
    for (const scope in delimsByScope) {
      const delim = delimsByScope[scope];
      // FIX: the closing delimiter used to be passed to tokenizeLines as a
      // separate `+ delim` argument (unary plus) instead of being part of the
      // tokenized source; the delimiters belong on their own lines so the
      // token-row indices below line up.
      const tokens = grammar.tokenizeLines(`${delim}\nSELECT bar\nFROM foo\n${delim}`);
      expect(tokens[0][0]).toEqual({value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python']});
      expect(tokens[1][0]).toEqual({value: 'SELECT', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']});
      expect(tokens[1][1]).toEqual({value: ' bar', scopes: ['source.python', scope, 'meta.embedded.sql']});
      expect(tokens[2][0]).toEqual({value: 'FROM', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']});
      // FIX: this assertion called a nonexistent `value(...)` helper; the
      // matcher argument is a plain {value, scopes} object like its siblings.
      expect(tokens[2][1]).toEqual({value: ' foo', scopes: ['source.python', scope, 'meta.embedded.sql']});
      expect(tokens[3][0]).toEqual({value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python']});
    }
  });
  it("tokenizes SQL inline highlighting on blocks with a CTE", function() {
    // Note that these scopes do not contain .sql because we can't definitively tell
    // if the string contains SQL or not
    const delimsByScope = {
      "string.quoted.double.block.python": '"""',
      "string.quoted.single.block.python": "'''"
    };
    for (const scope in delimsByScope) {
      const delim = delimsByScope[scope];
      // NOTE(review): the blank line before the outer SELECT keeps it on row 7,
      // matching the token indices asserted below (tokens[6] is skipped).
      const tokens = grammar.tokenizeLines(`\
${delim}
WITH example_cte AS (
SELECT bar
FROM foo
GROUP BY bar
)

SELECT COUNT(*)
FROM example_cte
${delim}`);
      expect(tokens[0][0]).toEqual({value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python']});
      expect(tokens[1][0]).toEqual({value: 'WITH', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']});
      expect(tokens[1][1]).toEqual({value: ' example_cte ', scopes: ['source.python', scope, 'meta.embedded.sql']});
      expect(tokens[1][2]).toEqual({value: 'AS', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.alias.sql']});
      expect(tokens[1][3]).toEqual({value: ' ', scopes: ['source.python', scope, 'meta.embedded.sql']});
      expect(tokens[1][4]).toEqual({value: '(', scopes: ['source.python', scope, 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.begin.sql']});
      expect(tokens[2][0]).toEqual({value: 'SELECT', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']});
      expect(tokens[2][1]).toEqual({value: ' bar', scopes: ['source.python', scope, 'meta.embedded.sql']});
      expect(tokens[3][0]).toEqual({value: 'FROM', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']});
      expect(tokens[3][1]).toEqual({value: ' foo', scopes: ['source.python', scope, 'meta.embedded.sql']});
      expect(tokens[4][0]).toEqual({value: 'GROUP BY', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']});
      expect(tokens[4][1]).toEqual({value: ' bar', scopes: ['source.python', scope, 'meta.embedded.sql']});
      expect(tokens[5][0]).toEqual({value: ')', scopes: ['source.python', scope, 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql']});
      expect(tokens[7][0]).toEqual({value: 'SELECT', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']});
      expect(tokens[8][0]).toEqual({value: 'FROM', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']});
      expect(tokens[9][0]).toEqual({value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python']});
    }
  });
  it("tokenizes SQL inline highlighting on single line with a CTE", function() {
    // Shared scope chains: `str` for the string punctuation, `sql` for embedded SQL tokens.
    const str = ['source.python', 'string.quoted.single.single-line.sql.python'];
    const sql = [...str, 'meta.embedded.sql'];
    const {tokens} = grammar.tokenizeLine('\'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte\'');
    expect(tokens[0]).toEqual({value: '\'', scopes: [...str, 'punctuation.definition.string.begin.python']});
    expect(tokens[1]).toEqual({value: 'WITH', scopes: [...sql, 'keyword.other.DML.sql']});
    expect(tokens[2]).toEqual({value: ' example_cte ', scopes: sql});
    expect(tokens[3]).toEqual({value: 'AS', scopes: [...sql, 'keyword.other.alias.sql']});
    expect(tokens[4]).toEqual({value: ' ', scopes: sql});
    expect(tokens[5]).toEqual({value: '(', scopes: [...sql, 'punctuation.definition.section.bracket.round.begin.sql']});
    expect(tokens[6]).toEqual({value: 'SELECT', scopes: [...sql, 'keyword.other.DML.sql']});
    expect(tokens[7]).toEqual({value: ' bar ', scopes: sql});
    expect(tokens[8]).toEqual({value: 'FROM', scopes: [...sql, 'keyword.other.DML.sql']});
    expect(tokens[9]).toEqual({value: ' foo', scopes: sql});
    expect(tokens[10]).toEqual({value: ')', scopes: [...sql, 'punctuation.definition.section.bracket.round.end.sql']});
    expect(tokens[11]).toEqual({value: ' ', scopes: sql});
    expect(tokens[12]).toEqual({value: 'SELECT', scopes: [...sql, 'keyword.other.DML.sql']});
    expect(tokens[13]).toEqual({value: ' ', scopes: sql});
    expect(tokens[14]).toEqual({value: 'COUNT', scopes: [...sql, 'support.function.aggregate.sql']});
    expect(tokens[15]).toEqual({value: '(', scopes: [...sql, 'punctuation.definition.section.bracket.round.begin.sql']});
    expect(tokens[16]).toEqual({value: '*', scopes: [...sql, 'keyword.operator.star.sql']});
    expect(tokens[17]).toEqual({value: ')', scopes: [...sql, 'punctuation.definition.section.bracket.round.end.sql']});
    expect(tokens[18]).toEqual({value: ' ', scopes: sql});
    expect(tokens[19]).toEqual({value: 'FROM', scopes: [...sql, 'keyword.other.DML.sql']});
    expect(tokens[20]).toEqual({value: ' example_cte', scopes: sql});
    expect(tokens[21]).toEqual({value: '\'', scopes: [...str, 'punctuation.definition.string.end.python']});
  });
  it("tokenizes Python escape characters and formatting specifiers in SQL strings", function() {
    const str = ['source.python', 'string.quoted.double.single-line.sql.python'];
    const sql = [...str, 'meta.embedded.sql'];
    const {tokens} = grammar.tokenizeLine('"INSERT INTO url (image_uri) VALUES (\\\'%s\\\');" % values');
    expect(tokens[0]).toEqual({value: '"', scopes: [...str, 'punctuation.definition.string.begin.python']});
    expect(tokens[10]).toEqual({value: '\\\'', scopes: [...sql, 'constant.character.escape.single-quote.python']});
    expect(tokens[11]).toEqual({value: '%s', scopes: [...sql, 'constant.other.placeholder.python']});
    expect(tokens[12]).toEqual({value: '\\\'', scopes: [...sql, 'constant.character.escape.single-quote.python']});
    expect(tokens[13]).toEqual({value: ')', scopes: [...sql, 'punctuation.definition.section.bracket.round.end.sql']});
    expect(tokens[15]).toEqual({value: '"', scopes: [...str, 'punctuation.definition.string.end.python']});
    // The trailing `% values` is ordinary Python, outside the string scopes.
    expect(tokens[17]).toEqual({value: '%', scopes: ['source.python', 'keyword.operator.arithmetic.python']});
  });
  it("recognizes DELETE as an HTTP method", function() {
    // "DELETE /path" should NOT trigger SQL scoping: the string keeps plain scopes.
    const str = ['source.python', 'string.quoted.double.single-line.python'];
    const {tokens} = grammar.tokenizeLine('"DELETE /api/v1/endpoint"');
    expect(tokens[0]).toEqual({value: '"', scopes: [...str, 'punctuation.definition.string.begin.python']});
    expect(tokens[1]).toEqual({value: 'DELETE /api/v1/endpoint', scopes: str});
    expect(tokens[2]).toEqual({value: '"', scopes: [...str, 'punctuation.definition.string.end.python']});
  });
});
});

View File

@ -1,29 +0,0 @@
# Spec for the language-ruby-on-rails package: activating the package should
# register each of its grammars under the expected scope name.
describe "Ruby on Rails package", ->
beforeEach ->
waitsForPromise ->
atom.packages.activatePackage("language-ruby-on-rails")
# Each case looks a grammar up by scope name and checks it resolved.
it "parses the HTML grammar", ->
grammar = atom.grammars.grammarForScopeName("text.html.ruby")
expect(grammar).toBeDefined()
expect(grammar.scopeName).toBe "text.html.ruby"
it "parses the JavaScript grammar", ->
grammar = atom.grammars.grammarForScopeName("source.js.rails source.js.jquery")
expect(grammar).toBeDefined()
expect(grammar.scopeName).toBe "source.js.rails source.js.jquery"
it "parses the RJS grammar", ->
grammar = atom.grammars.grammarForScopeName("source.ruby.rails.rjs")
expect(grammar).toBeDefined()
expect(grammar.scopeName).toBe "source.ruby.rails.rjs"
it "parses the Rails grammar", ->
grammar = atom.grammars.grammarForScopeName("source.ruby.rails")
expect(grammar).toBeDefined()
expect(grammar.scopeName).toBe "source.ruby.rails"
it "parses the SQL grammar", ->
grammar = atom.grammars.grammarForScopeName("source.sql.ruby")
expect(grammar).toBeDefined()
expect(grammar.scopeName).toBe "source.sql.ruby"

View File

@ -0,0 +1,34 @@
describe("Ruby on Rails package", () => {
  beforeEach(() => waitsForPromise(() => atom.packages.activatePackage("language-ruby-on-rails")));
  // Each entry pairs the spec label with the scope name the package registers.
  const grammarsByLabel = [
    ["HTML", "text.html.ruby"],
    ["JavaScript", "source.js.rails source.js.jquery"],
    ["RJS", "source.ruby.rails.rjs"],
    ["Rails", "source.ruby.rails"],
    ["SQL", "source.sql.ruby"]
  ];
  for (const [label, scopeName] of grammarsByLabel) {
    it(`parses the ${label} grammar`, () => {
      const grammar = atom.grammars.grammarForScopeName(scopeName);
      expect(grammar).toBeDefined();
      expect(grammar.scopeName).toBe(scopeName);
    });
  }
});

View File

@ -1,29 +0,0 @@
# Spec for the Rails grammar's mailer/model line scoping: each class-definition
# line should tokenize as a single token carrying the matching meta scope.
describe "Ruby on Rails snippets", ->
grammar = null
beforeEach ->
waitsForPromise ->
atom.packages.activatePackage("language-ruby-on-rails")
runs ->
grammar = atom.grammars.grammarForScopeName("source.ruby.rails")
it "tokenizes ActionMailer::Base", ->
railsMailer = 'class RailsMailer < ActionMailer::Base'
{tokens} = grammar.tokenizeLine railsMailer
expect(tokens[0]).toEqual value: railsMailer, scopes: ['source.ruby.rails', 'meta.rails.mailer']
it "tokenizes ApplicationMailer", ->
rails5Mailer = 'class Rails5Mailer < ApplicationMailer'
{tokens} = grammar.tokenizeLine rails5Mailer
expect(tokens[0]).toEqual value: rails5Mailer, scopes: ['source.ruby.rails', 'meta.rails.mailer']
it "tokenizes ActiveRecord::Base", ->
railsModel = 'class RailsModel < ActiveRecord::Base'
{tokens} = grammar.tokenizeLine railsModel
expect(tokens[0]).toEqual value: railsModel, scopes: ['source.ruby.rails', 'meta.rails.model']
it "tokenizes ApplicationRecord", ->
rails5Model = 'class Rails5Model < ApplicationRecord'
{tokens} = grammar.tokenizeLine rails5Model
expect(tokens[0]).toEqual value: rails5Model, scopes: ['source.ruby.rails', 'meta.rails.model']

View File

@ -0,0 +1,34 @@
describe("Ruby on Rails snippets", () => {
  let grammar = null;
  beforeEach(() => {
    waitsForPromise(() => atom.packages.activatePackage("language-ruby-on-rails"));
    runs(() => grammar = atom.grammars.grammarForScopeName("source.ruby.rails"));
  });
  // Each entry: [spec label, class-definition line, expected meta scope].
  const cases = [
    ['ActionMailer::Base', 'class RailsMailer < ActionMailer::Base', 'meta.rails.mailer'],
    ['ApplicationMailer', 'class Rails5Mailer < ApplicationMailer', 'meta.rails.mailer'],
    ['ActiveRecord::Base', 'class RailsModel < ActiveRecord::Base', 'meta.rails.model'],
    ['ApplicationRecord', 'class Rails5Model < ApplicationRecord', 'meta.rails.model']
  ];
  for (const [label, line, metaScope] of cases) {
    it(`tokenizes ${label}`, () => {
      // The whole class-definition line tokenizes as one token with the meta scope.
      const {tokens} = grammar.tokenizeLine(line);
      expect(tokens[0]).toEqual({value: line, scopes: ['source.ruby.rails', metaScope]});
    });
  }
});

View File

@ -1,29 +0,0 @@
# Spec for the TextMate ERB grammar: embedded-line vs embedded-block scoping
# of <%= ... %> sections. Tree-sitter is disabled so the TextMate grammar runs.
describe "TextMate HTML (Ruby - ERB) grammar", ->
grammar = null
beforeEach ->
atom.config.set 'core.useTreeSitterParsers', false
waitsForPromise ->
atom.packages.activatePackage("language-ruby")
runs ->
grammar = atom.grammars.grammarForScopeName("text.html.erb")
it "parses the grammar", ->
expect(grammar).toBeTruthy()
expect(grammar.scopeName).toBe "text.html.erb"
it "tokenizes embedded ruby", ->
# Single-line tag -> meta.embedded.line.erb scoping.
{tokens} = grammar.tokenizeLine('<%= self %>')
expect(tokens[0]).toEqual value: '<%=', scopes: ['text.html.erb', 'meta.embedded.line.erb', 'punctuation.section.embedded.begin.erb']
expect(tokens[1]).toEqual value: ' ', scopes: ['text.html.erb', 'meta.embedded.line.erb', 'source.ruby.embedded.erb']
expect(tokens[2]).toEqual value: 'self', scopes: ['text.html.erb', 'meta.embedded.line.erb', 'source.ruby.embedded.erb', 'variable.language.self.ruby']
expect(tokens[3]).toEqual value: ' ', scopes: ['text.html.erb', 'meta.embedded.line.erb', 'source.ruby.embedded.erb']
expect(tokens[4]).toEqual value: '%>', scopes: ['text.html.erb', 'meta.embedded.line.erb', 'punctuation.section.embedded.end.erb']
# Tag spanning multiple lines -> meta.embedded.block.erb scoping.
lines = grammar.tokenizeLines('<%=\nself\n%>')
expect(lines[0][0]).toEqual value: '<%=', scopes: ['text.html.erb', 'meta.embedded.block.erb', 'punctuation.section.embedded.begin.erb']
expect(lines[1][0]).toEqual value: 'self', scopes: ['text.html.erb', 'meta.embedded.block.erb', 'source.ruby.embedded.erb', 'variable.language.self.ruby']
expect(lines[2][0]).toEqual value: '%>', scopes: ['text.html.erb', 'meta.embedded.block.erb', 'punctuation.section.embedded.end.erb']

View File

@ -0,0 +1,32 @@
describe("TextMate HTML (Ruby - ERB) grammar", () => {
  let grammar = null;
  beforeEach(() => {
    // Disable tree-sitter so the TextMate grammar is the one exercised.
    atom.config.set('core.useTreeSitterParsers', false);
    waitsForPromise(() => atom.packages.activatePackage("language-ruby"));
    runs(() => grammar = atom.grammars.grammarForScopeName("text.html.erb"));
  });
  it("parses the grammar", () => {
    expect(grammar).toBeTruthy();
    expect(grammar.scopeName).toBe("text.html.erb");
  });
  it("tokenizes embedded ruby", () => {
    // Single-line tag: meta.embedded.line.erb scoping.
    const line = ['text.html.erb', 'meta.embedded.line.erb'];
    const {tokens} = grammar.tokenizeLine('<%= self %>');
    expect(tokens[0]).toEqual({value: '<%=', scopes: [...line, 'punctuation.section.embedded.begin.erb']});
    expect(tokens[1]).toEqual({value: ' ', scopes: [...line, 'source.ruby.embedded.erb']});
    expect(tokens[2]).toEqual({value: 'self', scopes: [...line, 'source.ruby.embedded.erb', 'variable.language.self.ruby']});
    expect(tokens[3]).toEqual({value: ' ', scopes: [...line, 'source.ruby.embedded.erb']});
    expect(tokens[4]).toEqual({value: '%>', scopes: [...line, 'punctuation.section.embedded.end.erb']});
    // Tag spanning multiple lines: meta.embedded.block.erb scoping.
    const block = ['text.html.erb', 'meta.embedded.block.erb'];
    const lines = grammar.tokenizeLines('<%=\nself\n%>');
    expect(lines[0][0]).toEqual({value: '<%=', scopes: [...block, 'punctuation.section.embedded.begin.erb']});
    expect(lines[1][0]).toEqual({value: 'self', scopes: [...block, 'source.ruby.embedded.erb', 'variable.language.self.ruby']});
    expect(lines[2][0]).toEqual({value: '%>', scopes: [...block, 'punctuation.section.embedded.end.erb']});
  });
});

View File

@ -1,57 +0,0 @@
# Spec for the TextMate Gemfile grammar: DSL keywords, quoted strings, symbols,
# and block parameters. Tree-sitter is disabled so the TextMate grammar runs.
describe "TextMate Gemfile grammar", ->
grammar = null
beforeEach ->
atom.config.set 'core.useTreeSitterParsers', false
waitsForPromise ->
atom.packages.activatePackage("language-ruby")
runs ->
grammar = atom.grammars.grammarForScopeName("source.ruby.gemfile")
it "parses the grammar", ->
expect(grammar).toBeTruthy()
expect(grammar.scopeName).toBe "source.ruby.gemfile"
# Gemfile DSL keywords are scoped as special methods.
it "tokenizes ruby", ->
{tokens} = grammar.tokenizeLine('ruby')
expect(tokens[0]).toEqual value: 'ruby', scopes: ['source.ruby.gemfile', 'meta.declaration.ruby.gemfile', 'keyword.other.special-method.ruby.gemfile']
it "tokenizes source", ->
{tokens} = grammar.tokenizeLine('source')
expect(tokens[0]).toEqual value: 'source', scopes: ['source.ruby.gemfile', 'meta.declaration.ruby.gemfile', 'keyword.other.special-method.ruby.gemfile']
it "tokenizes group", ->
{tokens} = grammar.tokenizeLine('group')
expect(tokens[0]).toEqual value: 'group', scopes: ['source.ruby.gemfile', 'meta.declaration.ruby.gemfile', 'keyword.other.special-method.ruby.gemfile']
it "tokenizes gem", ->
{tokens} = grammar.tokenizeLine('gem')
expect(tokens[0]).toEqual value: 'gem', scopes: ['source.ruby.gemfile', 'meta.declaration.ruby.gemfile', 'keyword.other.special-method.ruby.gemfile']
it "tokenizes double-quoted strings", ->
{tokens} = grammar.tokenizeLine('"foo"')
expect(tokens[0]).toEqual value: '"', scopes: ['source.ruby.gemfile', 'string.quoted.double.interpolated.ruby', 'punctuation.definition.string.begin.ruby']
expect(tokens[1]).toEqual value: 'foo', scopes: ['source.ruby.gemfile', 'string.quoted.double.interpolated.ruby']
expect(tokens[2]).toEqual value: '"', scopes: ['source.ruby.gemfile', 'string.quoted.double.interpolated.ruby', 'punctuation.definition.string.end.ruby']
it "tokenizes single-quoted strings", ->
{tokens} = grammar.tokenizeLine('\'foo\'')
expect(tokens[0]).toEqual value: '\'', scopes: ['source.ruby.gemfile', 'string.quoted.single.ruby', 'punctuation.definition.string.begin.ruby']
expect(tokens[1]).toEqual value: 'foo', scopes: ['source.ruby.gemfile', 'string.quoted.single.ruby']
expect(tokens[2]).toEqual value: '\'', scopes: ['source.ruby.gemfile', 'string.quoted.single.ruby', 'punctuation.definition.string.end.ruby']
it "tokenizes group names", ->
{tokens} = grammar.tokenizeLine(':foo')
expect(tokens[0]).toEqual value: ':', scopes: ['source.ruby.gemfile', 'constant.other.symbol.ruby', 'punctuation.definition.constant.ruby']
expect(tokens[1]).toEqual value: 'foo', scopes: ['source.ruby.gemfile', 'constant.other.symbol.ruby']
# `group` as a block parameter must NOT be scoped as the DSL keyword.
it "tokenizes group properly in ruby code", ->
{tokens} = grammar.tokenizeLine('do |group|')
expect(tokens[0]).toEqual value: 'do', scopes: ['source.ruby.gemfile', 'keyword.control.start-block.ruby']
expect(tokens[1]).toEqual value: ' ', scopes: ['source.ruby.gemfile']
expect(tokens[2]).toEqual value: '|', scopes: ['source.ruby.gemfile', 'punctuation.separator.variable.ruby']
expect(tokens[3]).toEqual value: 'group', scopes: ['source.ruby.gemfile', 'variable.other.block.ruby']
expect(tokens[4]).toEqual value: '|', scopes: ['source.ruby.gemfile', 'punctuation.separator.variable.ruby']
View File

@ -0,0 +1,67 @@
describe("TextMate Gemfile grammar", () => {
let grammar = null;
beforeEach(() => {
atom.config.set('core.useTreeSitterParsers', false);
waitsForPromise(() => atom.packages.activatePackage("language-ruby"));
runs(() => grammar = atom.grammars.grammarForScopeName("source.ruby.gemfile"));
});
// The Gemfile grammar should resolve under its scope name.
it("parses the grammar", function() {
  const scopeName = "source.ruby.gemfile";
  expect(grammar).toBeTruthy();
  expect(grammar.scopeName).toBe(scopeName);
});
// One spec per Gemfile DSL keyword that should be scoped as a special method.
for (const keyword of ['ruby', 'source', 'group', 'gem']) {
  it(`tokenizes ${keyword}`, () => {
    const {tokens} = grammar.tokenizeLine(keyword);
    expect(tokens[0]).toEqual({value: keyword, scopes: ['source.ruby.gemfile', 'meta.declaration.ruby.gemfile', 'keyword.other.special-method.ruby.gemfile']});
  });
}
it("tokenizes double-quoted strings", () => {
const {tokens} = grammar.tokenizeLine('"foo"');
expect(tokens[0]).toEqual({value: '"', scopes: ['source.ruby.gemfile', 'string.quoted.double.interpolated.ruby', 'punctuation.definition.string.begin.ruby']});
expect(tokens[1]).toEqual({value: 'foo', scopes: ['source.ruby.gemfile', 'string.quoted.double.interpolated.ruby']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.ruby.gemfile', 'string.quoted.double.interpolated.ruby', 'punctuation.definition.string.end.ruby']});
});
it("tokenizes single-quoted strings", () => {
const {tokens} = grammar.tokenizeLine('\'foo\'');
expect(tokens[0]).toEqual({value: '\'', scopes: ['source.ruby.gemfile', 'string.quoted.single.ruby', 'punctuation.definition.string.begin.ruby']});
expect(tokens[1]).toEqual({value: 'foo', scopes: ['source.ruby.gemfile', 'string.quoted.single.ruby']});
expect(tokens[2]).toEqual({value: '\'', scopes: ['source.ruby.gemfile', 'string.quoted.single.ruby', 'punctuation.definition.string.end.ruby']});
});
it("tokenizes group names", () => {
const {tokens} = grammar.tokenizeLine(':foo');
expect(tokens[0]).toEqual({value: ':', scopes: ['source.ruby.gemfile', 'constant.other.symbol.ruby', 'punctuation.definition.constant.ruby']});
expect(tokens[1]).toEqual({value: 'foo', scopes: ['source.ruby.gemfile', 'constant.other.symbol.ruby']});
});
it("tokenizes group properly in ruby code", () => {
const {tokens} = grammar.tokenizeLine('do |group|');
expect(tokens[0]).toEqual({value: 'do', scopes: ['source.ruby.gemfile', 'keyword.control.start-block.ruby']});
expect(tokens[1]).toEqual({value: ' ', scopes: ['source.ruby.gemfile']});
expect(tokens[2]).toEqual({value: '|', scopes: ['source.ruby.gemfile', 'punctuation.separator.variable.ruby']});
expect(tokens[3]).toEqual({value: 'group', scopes: ['source.ruby.gemfile', 'variable.other.block.ruby']});
expect(tokens[4]).toEqual({value: '|', scopes: ['source.ruby.gemfile', 'punctuation.separator.variable.ruby']});
});
});

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,627 +0,0 @@
describe 'Sass grammar', ->
grammar = null
# Activate both packages before each spec — language-css first, then
# language-sass (the specs below assert a mix of *.css and *.sass scopes,
# so presumably the Sass grammar depends on the CSS one; confirm against
# the package manifests) — then look up the grammar once both are ready.
beforeEach ->
  waitsForPromise ->
    atom.packages.activatePackage('language-css')
  waitsForPromise ->
    atom.packages.activatePackage('language-sass')
  runs ->
    grammar = atom.grammars.grammarForScopeName('source.sass')
# Sanity check: the grammar loaded and reports the expected root scope.
it 'parses the grammar', ->
  expect(grammar).toBeTruthy()
  expect(grammar.scopeName).toBe 'source.sass'
# Vendor-prefixed property names (-webkit-*, etc.) get the dedicated
# `support.type.vendored.property-name.css` scope rather than the plain
# property-name scope.
describe 'vendor-prefixed properties', ->
  it 'tokenizes them as properties', ->
    # NOTE(review): heredoc nesting reconstructed — Sass requires the
    # property line to be indented under the selector.
    tokens = grammar.tokenizeLines '''
      .something
        -webkit-mask-repeat: no-repeat
    '''
    expect(tokens[1][1]).toEqual value: '-webkit-mask-repeat', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.vendored.property-name.css']
# Property lists: `name: value` pairs nested under a selector, in both the
# standard (`color: inherit`) and the colon-first (`:color inherit`) Sass
# syntaxes, including one extra nesting level.
describe 'property-list', ->
  it 'tokenizes the property-name and property-value', ->
    tokens = grammar.tokenizeLines '''
      very-custom
        color: inherit
    '''
    expect(tokens[1][1]).toEqual value: 'color', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.property-name.css']
    expect(tokens[1][2]).toEqual value: ':', scopes: ['source.sass', 'meta.property-value.sass', 'punctuation.separator.key-value.css']
    expect(tokens[1][3]).toEqual value: ' ', scopes: ['source.sass', 'meta.property-value.sass']
    expect(tokens[1][4]).toEqual value: 'inherit', scopes: ['source.sass', 'meta.property-value.sass', 'support.constant.property-value.css']

  it 'tokenizes nested property-lists', ->
    tokens = grammar.tokenizeLines '''
      very-custom
        very-very-custom
          color: inherit
          margin: top
    '''
    expect(tokens[2][1]).toEqual value: 'color', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.property-name.css']
    expect(tokens[2][2]).toEqual value: ':', scopes: ['source.sass', 'meta.property-value.sass', 'punctuation.separator.key-value.css']
    expect(tokens[2][3]).toEqual value: ' ', scopes: ['source.sass', 'meta.property-value.sass']
    expect(tokens[2][4]).toEqual value: 'inherit', scopes: ['source.sass', 'meta.property-value.sass', 'support.constant.property-value.css']
    expect(tokens[3][1]).toEqual value: 'margin', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.property-name.css']
    expect(tokens[3][2]).toEqual value: ':', scopes: ['source.sass', 'meta.property-value.sass', 'punctuation.separator.key-value.css']
    expect(tokens[3][3]).toEqual value: ' ', scopes: ['source.sass', 'meta.property-value.sass']
    expect(tokens[3][4]).toEqual value: 'top', scopes: ['source.sass', 'meta.property-value.sass', 'support.constant.property-value.css']

  it 'tokenizes colon-first property-list syntax', ->
    tokens = grammar.tokenizeLines '''
      very-custom
        :color inherit
    '''
    expect(tokens[1][1]).toEqual value: ':', scopes: ['source.sass', 'punctuation.separator.key-value.css']
    expect(tokens[1][2]).toEqual value: 'color', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.property-name.css']
    expect(tokens[1][3]).toEqual value: ' ', scopes: ['source.sass', 'meta.property-value.sass']
    expect(tokens[1][4]).toEqual value: 'inherit', scopes: ['source.sass', 'meta.property-value.sass', 'support.constant.property-value.css']

  it 'tokenizes nested colon-first property-list syntax', ->
    tokens = grammar.tokenizeLines '''
      very-custom
        very-very-custom
          :color inherit
          :margin top
    '''
    expect(tokens[2][1]).toEqual value: ':', scopes: ['source.sass', 'punctuation.separator.key-value.css']
    expect(tokens[2][2]).toEqual value: 'color', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.property-name.css']
    expect(tokens[2][3]).toEqual value: ' ', scopes: ['source.sass', 'meta.property-value.sass']
    expect(tokens[2][4]).toEqual value: 'inherit', scopes: ['source.sass', 'meta.property-value.sass', 'support.constant.property-value.css']
    expect(tokens[3][1]).toEqual value: ':', scopes: ['source.sass', 'punctuation.separator.key-value.css']
    expect(tokens[3][2]).toEqual value: 'margin', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.property-name.css']
    expect(tokens[3][3]).toEqual value: ' ', scopes: ['source.sass', 'meta.property-value.sass']
    expect(tokens[3][4]).toEqual value: 'top', scopes: ['source.sass', 'meta.property-value.sass', 'support.constant.property-value.css']
# Pseudo-classes (`:hover`), pseudo-elements (`::before`) and functional
# pseudo-classes (`:not(...)`), both at the top level and nested one level
# under a parent selector.
describe 'pseudo-classes and pseudo-elements', ->
  it 'tokenizes pseudo-classes', ->
    tokens = grammar.tokenizeLines '''
      a:hover
        display: none
    '''
    expect(tokens[0][0]).toEqual value: 'a', scopes: ['source.sass', 'meta.selector.css', 'entity.name.tag.css']
    expect(tokens[0][1]).toEqual value: ':', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']
    expect(tokens[0][2]).toEqual value: 'hover', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css']

  it 'tokenizes pseudo-elements', ->
    tokens = grammar.tokenizeLines '''
      a::before
        display: none
    '''
    expect(tokens[0][0]).toEqual value: 'a', scopes: ['source.sass', 'meta.selector.css', 'entity.name.tag.css']
    expect(tokens[0][1]).toEqual value: '::', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css', 'punctuation.definition.entity.css']
    expect(tokens[0][2]).toEqual value: 'before', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css']

  it 'tokenizes functional pseudo-classes', ->
    # The argument of :not() is itself a selector (here a class).
    tokens = grammar.tokenizeLines '''
      &:not(.selected)
        display: none
    '''
    expect(tokens[0][1]).toEqual value: ':', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']
    expect(tokens[0][2]).toEqual value: 'not', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css']
    expect(tokens[0][3]).toEqual value: '(', scopes: ['source.sass', 'meta.selector.css', 'punctuation.section.function.begin.bracket.round.css']
    expect(tokens[0][4]).toEqual value: '.', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']
    expect(tokens[0][5]).toEqual value: 'selected', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.class.css']
    expect(tokens[0][6]).toEqual value: ')', scopes: ['source.sass', 'meta.selector.css', 'punctuation.section.function.end.bracket.round.css']

  it 'tokenizes nested pseudo-classes', ->
    tokens = grammar.tokenizeLines '''
      body
        a:hover
          display: none
    '''
    expect(tokens[1][1]).toEqual value: 'a', scopes: ['source.sass', 'meta.selector.css', 'entity.name.tag.css']
    expect(tokens[1][2]).toEqual value: ':', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']
    expect(tokens[1][3]).toEqual value: 'hover', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css']

  it 'tokenizes nested pseudo-elements', ->
    tokens = grammar.tokenizeLines '''
      body
        a::before
          display: none
    '''
    expect(tokens[1][1]).toEqual value: 'a', scopes: ['source.sass', 'meta.selector.css', 'entity.name.tag.css']
    expect(tokens[1][2]).toEqual value: '::', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css', 'punctuation.definition.entity.css']
    expect(tokens[1][3]).toEqual value: 'before', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css']
# Attribute selectors: `[name="value"]`, single and back-to-back pairs.
# Each piece (brackets, attribute name, operator, quoted value) has its
# own scope.
describe 'selectors', ->
  describe 'attribute selectors', ->
    it 'tokenizes a single attribute selector', ->
      {tokens} = grammar.tokenizeLine '[something="1"]'
      expect(tokens[0]).toEqual value: '[', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'punctuation.definition.attribute-selector.begin.bracket.square.sass']
      expect(tokens[1]).toEqual value: 'something', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'entity.other.attribute-name.attribute.sass']
      expect(tokens[2]).toEqual value: '=', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'keyword.operator.sass']
      expect(tokens[3]).toEqual value: '"', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass', 'punctuation.definition.string.begin.sass']
      expect(tokens[4]).toEqual value: '1', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass']
      expect(tokens[5]).toEqual value: '"', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass', 'punctuation.definition.string.end.sass']
      expect(tokens[6]).toEqual value: ']', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'punctuation.definition.attribute-selector.end.bracket.square.sass']

    it "tokenizes multiple attribute selectors", ->
      {tokens} = grammar.tokenizeLine '[data-name="text-color"][data-value="null"]'
      expect(tokens[0]).toEqual value: '[', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'punctuation.definition.attribute-selector.begin.bracket.square.sass']
      expect(tokens[1]).toEqual value: 'data-name', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'entity.other.attribute-name.attribute.sass']
      expect(tokens[2]).toEqual value: '=', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'keyword.operator.sass']
      expect(tokens[3]).toEqual value: '"', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass', 'punctuation.definition.string.begin.sass']
      expect(tokens[4]).toEqual value: 'text-color', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass']
      expect(tokens[5]).toEqual value: '"', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass', 'punctuation.definition.string.end.sass']
      expect(tokens[6]).toEqual value: ']', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'punctuation.definition.attribute-selector.end.bracket.square.sass']
      expect(tokens[7]).toEqual value: '[', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'punctuation.definition.attribute-selector.begin.bracket.square.sass']
      expect(tokens[8]).toEqual value: 'data-value', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'entity.other.attribute-name.attribute.sass']
      expect(tokens[9]).toEqual value: '=', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'keyword.operator.sass']
      expect(tokens[10]).toEqual value: '"', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass', 'punctuation.definition.string.begin.sass']
      expect(tokens[11]).toEqual value: 'null', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass']
      expect(tokens[12]).toEqual value: '"', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass', 'punctuation.definition.string.end.sass']
      expect(tokens[13]).toEqual value: ']', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'punctuation.definition.attribute-selector.end.bracket.square.sass']
# Numeric values: a signed number keeps its sign inside the numeric token
# (`+50`), while a spaced operator between two numbers is a separate
# `keyword.operator.css` token.
describe 'numbers', ->
  it 'tokenizes them', ->
    tokens = grammar.tokenizeLines '''
      .something
        top: 50%
    '''
    expect(tokens[1][4]).toEqual value: '50', scopes: ['source.sass', 'meta.property-value.sass', 'constant.numeric.css']

  it 'tokenizes number operations', ->
    tokens = grammar.tokenizeLines '''
      .something
        top: +50%
    '''
    expect(tokens[1][4]).toEqual value: '+50', scopes: ['source.sass', 'meta.property-value.sass', 'constant.numeric.css']
    tokens = grammar.tokenizeLines '''
      .something
        top: 50% - 30%
    '''
    expect(tokens[1][7]).toEqual value: '-', scopes: ['source.sass', 'meta.property-value.sass', 'keyword.operator.css']
# Variable declarations: `$name: value`, at the top level and when
# preceded by leading indentation (token [0] is then the whitespace).
describe 'variables', ->
  it 'tokenizes them', ->
    {tokens} = grammar.tokenizeLine '$test: bla'
    expect(tokens[0]).toEqual value: '$', scopes: ['source.sass', 'meta.variable-declaration.sass', 'punctuation.definition.entity.sass']
    expect(tokens[1]).toEqual value: 'test', scopes: ['source.sass', 'meta.variable-declaration.sass', 'variable.other.sass']
    expect(tokens[2]).toEqual value: ':', scopes: ['source.sass', 'meta.variable-declaration.sass', 'meta.property-value.sass', 'punctuation.separator.key-value.css']
    expect(tokens[3]).toEqual value: ' ', scopes: ['source.sass', 'meta.variable-declaration.sass', 'meta.property-value.sass']
    expect(tokens[4]).toEqual value: 'bla', scopes: ['source.sass', 'meta.variable-declaration.sass', 'meta.property-value.sass']

  it 'tokenizes indented variables', ->
    {tokens} = grammar.tokenizeLine '  $test: bla'
    expect(tokens[1]).toEqual value: '$', scopes: ['source.sass', 'meta.variable-declaration.sass', 'punctuation.definition.entity.sass']
    expect(tokens[2]).toEqual value: 'test', scopes: ['source.sass', 'meta.variable-declaration.sass', 'variable.other.sass']
    expect(tokens[3]).toEqual value: ':', scopes: ['source.sass', 'meta.variable-declaration.sass', 'meta.property-value.sass', 'punctuation.separator.key-value.css']
    expect(tokens[4]).toEqual value: ' ', scopes: ['source.sass', 'meta.variable-declaration.sass', 'meta.property-value.sass']
    expect(tokens[5]).toEqual value: 'bla', scopes: ['source.sass', 'meta.variable-declaration.sass', 'meta.property-value.sass']
# Quoted strings in property values, plus backslash escape sequences in
# both quote styles. (Double-quoted CoffeeScript heredocs are used where
# the snippet itself contains single quotes, and vice versa.)
describe 'strings', ->
  it 'tokenizes single-quote strings', ->
    tokens = grammar.tokenizeLines """
      .a
        content: 'hi'
    """
    expect(tokens[1][4]).toEqual value: "'", scopes: ['source.sass', 'meta.property-value.sass', 'string.quoted.single.sass', 'punctuation.definition.string.begin.sass']
    expect(tokens[1][5]).toEqual value: 'hi', scopes: ['source.sass', 'meta.property-value.sass', 'string.quoted.single.sass']
    expect(tokens[1][6]).toEqual value: "'", scopes: ['source.sass', 'meta.property-value.sass', 'string.quoted.single.sass', 'punctuation.definition.string.end.sass']

  it 'tokenizes double-quote strings', ->
    tokens = grammar.tokenizeLines '''
      .a
        content: "hi"
    '''
    expect(tokens[1][4]).toEqual value: '"', scopes: ['source.sass', 'meta.property-value.sass', 'string.quoted.double.sass', 'punctuation.definition.string.begin.sass']
    expect(tokens[1][5]).toEqual value: 'hi', scopes: ['source.sass', 'meta.property-value.sass', 'string.quoted.double.sass']
    expect(tokens[1][6]).toEqual value: '"', scopes: ['source.sass', 'meta.property-value.sass', 'string.quoted.double.sass', 'punctuation.definition.string.end.sass']

  it 'tokenizes escape characters', ->
    tokens = grammar.tokenizeLines """
      .a
        content: '\\abcdef'
    """
    expect(tokens[1][5]).toEqual value: '\\abcdef', scopes: ['source.sass', 'meta.property-value.sass', 'string.quoted.single.sass', 'constant.character.escape.sass']
    tokens = grammar.tokenizeLines '''
      .a
        content: "\\abcdef"
    '''
    expect(tokens[1][5]).toEqual value: '\\abcdef', scopes: ['source.sass', 'meta.property-value.sass', 'string.quoted.double.sass', 'constant.character.escape.sass']
# Sass comments are line-based: `//` and `/*` only open a comment at the
# start of a line (anything later is invalid inside a selector), and a
# comment's extent is determined by indentation, not by `*/`.
describe 'comments', ->
  it 'only tokenizes comments that start at the beginning of a line', ->
    {tokens} = grammar.tokenizeLine ' //A comment?'
    expect(tokens[1]).toEqual value: '//', scopes: ['source.sass', 'comment.line.sass', 'punctuation.definition.comment.sass']
    expect(tokens[2]).toEqual value: 'A comment?', scopes: ['source.sass', 'comment.line.sass']
    {tokens} = grammar.tokenizeLine '/* also a comment */'
    expect(tokens[0]).toEqual value: '/*', scopes: ['source.sass', 'comment.block.sass', 'punctuation.definition.comment.sass']
    expect(tokens[1]).toEqual value: ' also a comment ', scopes: ['source.sass', 'comment.block.sass']
    expect(tokens[2]).toEqual value: '*/', scopes: ['source.sass', 'comment.block.sass', 'punctuation.definition.comment.sass']
    {tokens} = grammar.tokenizeLine 'this //is not a comment'
    expect(tokens[1]).toEqual value: '//is not a comment', scopes: ['source.sass', 'meta.selector.css', 'invalid.illegal.sass']
    {tokens} = grammar.tokenizeLine 'this /* is also not a comment */'
    expect(tokens[1]).toEqual value: '/* is also not a comment */', scopes: ['source.sass', 'meta.selector.css', 'invalid.illegal.sass']

  it 'correctly tokenizes block comments based on indentation', ->
    # hi2/hi3 are indented past the comment opener so they stay inside the
    # comment; hi4 returns to column 0 and ends it.
    tokens = grammar.tokenizeLines '''
      /* hi1
       hi2
       hi3
      hi4
    '''
    expect(tokens[0][0]).toEqual value: '/*', scopes: ['source.sass', 'comment.block.sass', 'punctuation.definition.comment.sass']
    expect(tokens[0][1]).toEqual value: ' hi1', scopes: ['source.sass', 'comment.block.sass']
    expect(tokens[1][0]).toEqual value: ' hi2', scopes: ['source.sass', 'comment.block.sass']
    expect(tokens[2][0]).toEqual value: ' hi3', scopes: ['source.sass', 'comment.block.sass']
    expect(tokens[3][0]).toEqual value: 'hi4', scopes: ['source.sass', 'meta.selector.css']

  it 'correctly tokenizes line comments based on indentation', ->
    tokens = grammar.tokenizeLines '''
      // hi1
       hi2
       hi3
      hi4
    '''
    expect(tokens[0][0]).toEqual value: '//', scopes: ['source.sass', 'comment.line.sass', 'punctuation.definition.comment.sass']
    expect(tokens[0][1]).toEqual value: ' hi1', scopes: ['source.sass', 'comment.line.sass']
    expect(tokens[1][0]).toEqual value: ' hi2', scopes: ['source.sass', 'comment.line.sass']
    expect(tokens[2][0]).toEqual value: ' hi3', scopes: ['source.sass', 'comment.line.sass']
    expect(tokens[3][0]).toEqual value: 'hi4', scopes: ['source.sass', 'meta.selector.css']
describe 'at-rules and directives', ->
# @media: a bare media type, then a full query with two parenthesised
# feature tests joined by `and`.
it 'tokenizes @media', ->
  tokens = grammar.tokenizeLines '''
    @media screen
      background: none
  '''
  expect(tokens[0][0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.media.sass', 'keyword.control.at-rule.media.sass', 'punctuation.definition.keyword.sass']
  expect(tokens[0][1]).toEqual value: 'media', scopes: ['source.sass', 'meta.at-rule.media.sass', 'keyword.control.at-rule.media.sass']
  expect(tokens[0][2]).toEqual value: ' ', scopes: ['source.sass', 'meta.at-rule.media.sass']
  expect(tokens[0][3]).toEqual value: 'screen', scopes: ['source.sass', 'meta.at-rule.media.sass', 'support.constant.media.css']
  expect(tokens[1][1]).toEqual value: 'background', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.property-name.css']
  tokens = grammar.tokenizeLines '''
    @media (orientation: landscape) and (min-width: 700px)
      background: none
  '''
  expect(tokens[0][0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.media.sass', 'keyword.control.at-rule.media.sass', 'punctuation.definition.keyword.sass']
  expect(tokens[0][1]).toEqual value: 'media', scopes: ['source.sass', 'meta.at-rule.media.sass', 'keyword.control.at-rule.media.sass']
  expect(tokens[0][2]).toEqual value: ' ', scopes: ['source.sass', 'meta.at-rule.media.sass']
  expect(tokens[0][3]).toEqual value: '(', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'punctuation.definition.media-query.begin.bracket.round.sass']
  expect(tokens[0][4]).toEqual value: 'orientation', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'meta.property-name.media-query.sass', 'support.type.property-name.media.css']
  expect(tokens[0][5]).toEqual value: ':', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'punctuation.separator.key-value.sass']
  expect(tokens[0][6]).toEqual value: ' ', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass']
  expect(tokens[0][7]).toEqual value: 'landscape', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'meta.property-value.media-query.sass', 'support.constant.property-value.css']
  expect(tokens[0][8]).toEqual value: ')', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'punctuation.definition.media-query.end.bracket.round.sass']
  expect(tokens[0][9]).toEqual value: ' ', scopes: ['source.sass', 'meta.at-rule.media.sass']
  expect(tokens[0][10]).toEqual value: 'and', scopes: ['source.sass', 'meta.at-rule.media.sass', 'keyword.operator.logical.sass']
  expect(tokens[0][12]).toEqual value: '(', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'punctuation.definition.media-query.begin.bracket.round.sass']
  expect(tokens[0][13]).toEqual value: 'min-width', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'meta.property-name.media-query.sass', 'support.type.property-name.media.css']
  expect(tokens[0][14]).toEqual value: ':', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'punctuation.separator.key-value.sass']
  expect(tokens[0][16]).toEqual value: '700', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'meta.property-value.media-query.sass', 'constant.numeric.css']
  expect(tokens[0][17]).toEqual value: 'px', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'meta.property-value.media-query.sass', 'constant.numeric.css', 'keyword.other.unit.px.css']
  expect(tokens[0][18]).toEqual value: ')', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'punctuation.definition.media-query.end.bracket.round.sass']
  expect(tokens[1][1]).toEqual value: 'background', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.property-name.css']
# @function: the function name and each `$param` in the parameter list.
it 'tokenizes @function', ->
  {tokens} = grammar.tokenizeLine '@function function_name($p1, $p2)'
  expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.function.sass', 'keyword.control.at-rule.function.sass', 'punctuation.definition.entity.sass']
  expect(tokens[1]).toEqual value: 'function', scopes: ['source.sass', 'meta.at-rule.function.sass', 'keyword.control.at-rule.function.sass']
  expect(tokens[3]).toEqual value: 'function_name', scopes: ['source.sass', 'meta.at-rule.function.sass', 'support.function.misc.sass']
  expect(tokens[5]).toEqual value: '$', scopes: ['source.sass', 'meta.at-rule.function.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']
  expect(tokens[6]).toEqual value: 'p1', scopes: ['source.sass', 'meta.at-rule.function.sass', 'meta.variable-usage.sass', 'variable.other.sass']
  expect(tokens[8]).toEqual value: '$', scopes: ['source.sass', 'meta.at-rule.function.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']
  expect(tokens[9]).toEqual value: 'p2', scopes: ['source.sass', 'meta.at-rule.function.sass', 'meta.variable-usage.sass', 'variable.other.sass']
# @return: strings and the `+` concatenation operator in the return value.
it 'tokenizes @return', ->
  {tokens} = grammar.tokenizeLine '@return \'border\' + \' \' + \'1px solid pink\''
  expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.return.sass', 'keyword.control.return.sass', 'punctuation.definition.entity.sass']
  expect(tokens[1]).toEqual value: 'return', scopes: ['source.sass', 'meta.at-rule.return.sass', 'keyword.control.return.sass']
  expect(tokens[4]).toEqual value: 'border', scopes: ['source.sass', 'meta.at-rule.return.sass', 'string.quoted.single.sass']
  expect(tokens[7]).toEqual value: '+', scopes: ['source.sass', 'meta.at-rule.return.sass', 'keyword.operator.css']
# @if: a plain `$var` comparison, then a module-namespaced variable
# (`config.$setting`, Sass module syntax) in the same position.
it 'tokenizes @if', ->
  {tokens} = grammar.tokenizeLine '@if $var == true'
  expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.control.if.sass', 'punctuation.definition.entity.sass']
  expect(tokens[1]).toEqual value: 'if', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.control.if.sass']
  expect(tokens[3]).toEqual value: '$', scopes: ['source.sass', 'meta.at-rule.if.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']
  expect(tokens[4]).toEqual value: 'var', scopes: ['source.sass', 'meta.at-rule.if.sass', 'meta.variable-usage.sass', 'variable.other.sass']
  expect(tokens[6]).toEqual value: '==', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.operator.comparison.sass']
  expect(tokens[8]).toEqual value: 'true', scopes: ['source.sass', 'meta.at-rule.if.sass', 'support.constant.property-value.css.sass']
  {tokens} = grammar.tokenizeLine '@if config.$setting == true'
  expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.control.if.sass', 'punctuation.definition.entity.sass']
  expect(tokens[1]).toEqual value: 'if', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.control.if.sass']
  expect(tokens[3]).toEqual value: 'config', scopes: ['source.sass', 'meta.at-rule.if.sass', 'meta.variable-usage.sass', 'variable.sass']
  expect(tokens[4]).toEqual value: '.', scopes: ['source.sass', 'meta.at-rule.if.sass', 'meta.variable-usage.sass', 'punctuation.access.module.sass']
  expect(tokens[5]).toEqual value: '$', scopes: ['source.sass', 'meta.at-rule.if.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']
  expect(tokens[6]).toEqual value: 'setting', scopes: ['source.sass', 'meta.at-rule.if.sass', 'meta.variable-usage.sass', 'variable.other.sass']
  expect(tokens[8]).toEqual value: '==', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.operator.comparison.sass']
  expect(tokens[10]).toEqual value: 'true', scopes: ['source.sass', 'meta.at-rule.if.sass', 'support.constant.property-value.css.sass']
# @else if: note the grammar captures 'else if ' (trailing space included)
# as a single keyword token, per the expected values below.
it 'tokenizes @else if', ->
  {tokens} = grammar.tokenizeLine '@else if $var == false'
  expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.else.sass', 'keyword.control.else.sass', 'punctuation.definition.entity.sass']
  expect(tokens[1]).toEqual value: 'else if ', scopes: ['source.sass', 'meta.at-rule.else.sass', 'keyword.control.else.sass']
  expect(tokens[2]).toEqual value: '$', scopes: ['source.sass', 'meta.at-rule.else.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']
  expect(tokens[3]).toEqual value: 'var', scopes: ['source.sass', 'meta.at-rule.else.sass', 'meta.variable-usage.sass', 'variable.other.sass']
  expect(tokens[5]).toEqual value: '==', scopes: ['source.sass', 'meta.at-rule.else.sass', 'keyword.operator.comparison.sass']
  expect(tokens[7]).toEqual value: 'false', scopes: ['source.sass', 'meta.at-rule.else.sass', 'support.constant.property-value.css.sass']
  {tokens} = grammar.tokenizeLine '@else if config.$setting == false'
  expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.else.sass', 'keyword.control.else.sass', 'punctuation.definition.entity.sass']
  expect(tokens[1]).toEqual value: 'else if ', scopes: ['source.sass', 'meta.at-rule.else.sass', 'keyword.control.else.sass']
  expect(tokens[2]).toEqual value: 'config', scopes: ['source.sass', 'meta.at-rule.else.sass', 'meta.variable-usage.sass', 'variable.sass']
  expect(tokens[3]).toEqual value: '.', scopes: ['source.sass', 'meta.at-rule.else.sass', 'meta.variable-usage.sass', 'punctuation.access.module.sass']
  expect(tokens[4]).toEqual value: '$', scopes: ['source.sass', 'meta.at-rule.else.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']
  expect(tokens[5]).toEqual value: 'setting', scopes: ['source.sass', 'meta.at-rule.else.sass', 'meta.variable-usage.sass', 'variable.other.sass']
  expect(tokens[7]).toEqual value: '==', scopes: ['source.sass', 'meta.at-rule.else.sass', 'keyword.operator.comparison.sass']
  expect(tokens[9]).toEqual value: 'false', scopes: ['source.sass', 'meta.at-rule.else.sass', 'support.constant.property-value.css.sass']
# @while: keyword plus a numeric condition.
it 'tokenizes @while', ->
  {tokens} = grammar.tokenizeLine '@while 1'
  expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.while.sass', 'keyword.control.while.sass', 'punctuation.definition.entity.sass']
  expect(tokens[1]).toEqual value: 'while', scopes: ['source.sass', 'meta.at-rule.while.sass', 'keyword.control.while.sass']
  expect(tokens[3]).toEqual value: '1', scopes: ['source.sass', 'meta.at-rule.while.sass', 'constant.numeric.css']
# @for: the loop variable and both range bounds.
it 'tokenizes @for', ->
  {tokens} = grammar.tokenizeLine '@for $i from 1 through 100'
  expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.for.sass', 'keyword.control.for.sass', 'punctuation.definition.entity.sass']
  expect(tokens[1]).toEqual value: 'for', scopes: ['source.sass', 'meta.at-rule.for.sass', 'keyword.control.for.sass']
  expect(tokens[3]).toEqual value: '$', scopes: ['source.sass', 'meta.at-rule.for.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']
  expect(tokens[4]).toEqual value: 'i', scopes: ['source.sass', 'meta.at-rule.for.sass', 'meta.variable-usage.sass', 'variable.other.sass']
  expect(tokens[8]).toEqual value: '1', scopes: ['source.sass', 'meta.at-rule.for.sass', 'constant.numeric.css']
  expect(tokens[12]).toEqual value: '100', scopes: ['source.sass', 'meta.at-rule.for.sass', 'constant.numeric.css']
  # 'from' and 'through' tested in operators
# @each: a plain `$list`, then a module-namespaced `module.$list`
# (Sass module syntax) as the iterated collection.
it 'tokenizes @each', ->
  {tokens} = grammar.tokenizeLine '@each $item in $list'
  expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.each.sass', 'keyword.control.each.sass', 'punctuation.definition.entity.sass']
  expect(tokens[1]).toEqual value: 'each', scopes: ['source.sass', 'meta.at-rule.each.sass', 'keyword.control.each.sass']
  expect(tokens[3]).toEqual value: '$', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']
  expect(tokens[4]).toEqual value: 'item', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'variable.other.sass']
  expect(tokens[8]).toEqual value: '$', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']
  expect(tokens[9]).toEqual value: 'list', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'variable.other.sass']
  {tokens} = grammar.tokenizeLine '@each $item in module.$list'
  expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.each.sass', 'keyword.control.each.sass', 'punctuation.definition.entity.sass']
  expect(tokens[1]).toEqual value: 'each', scopes: ['source.sass', 'meta.at-rule.each.sass', 'keyword.control.each.sass']
  expect(tokens[3]).toEqual value: '$', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']
  expect(tokens[4]).toEqual value: 'item', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'variable.other.sass']
  expect(tokens[8]).toEqual value: 'module', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'variable.sass']
  expect(tokens[9]).toEqual value: '.', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'punctuation.access.module.sass']
  expect(tokens[10]).toEqual value: '$', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']
  expect(tokens[11]).toEqual value: 'list', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'variable.other.sass']
  # 'in' tested in operators
# @include: mixin invocation, both a plain mixin name and a
# module-namespaced one (mixin.name), which splits into namespace /
# accessor / name tokens.
it 'tokenizes @include', ->
{tokens} = grammar.tokenizeLine '@include mixin-name'
expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.function.include.sass', 'keyword.control.at-rule.include.sass', 'punctuation.definition.entity.sass']
expect(tokens[1]).toEqual value: 'include', scopes: ['source.sass', 'meta.function.include.sass', 'keyword.control.at-rule.include.sass']
expect(tokens[3]).toEqual value: 'mixin-name', scopes: ['source.sass', 'meta.function.include.sass', 'variable.other.sass']
{tokens} = grammar.tokenizeLine '@include mixin.name'
expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.function.include.sass', 'keyword.control.at-rule.include.sass', 'punctuation.definition.entity.sass']
expect(tokens[1]).toEqual value: 'include', scopes: ['source.sass', 'meta.function.include.sass', 'keyword.control.at-rule.include.sass']
expect(tokens[3]).toEqual value: 'mixin', scopes: ['source.sass', 'meta.function.include.sass', 'variable.sass']
expect(tokens[4]).toEqual value: '.', scopes: ['source.sass', 'meta.function.include.sass', 'punctuation.access.module.sass']
expect(tokens[5]).toEqual value: 'name', scopes: ['source.sass', 'meta.function.include.sass', 'variable.other.sass']
# '+' shorthand for @include: scoped identically to the @include keyword
# (same meta.function.include.sass scopes), for plain and namespaced names.
it 'tokenizes \'+\'', ->
{tokens} = grammar.tokenizeLine '+mixin-name'
expect(tokens[0]).toEqual value: '+', scopes: ['source.sass', 'meta.function.include.sass', 'keyword.control.at-rule.include.sass']
expect(tokens[1]).toEqual value: 'mixin-name', scopes: ['source.sass', 'meta.function.include.sass', 'variable.other.sass']
{tokens} = grammar.tokenizeLine '+mixin.name'
expect(tokens[0]).toEqual value: '+', scopes: ['source.sass', 'meta.function.include.sass', 'keyword.control.at-rule.include.sass']
expect(tokens[1]).toEqual value: 'mixin', scopes: ['source.sass', 'meta.function.include.sass', 'variable.sass']
expect(tokens[2]).toEqual value: '.', scopes: ['source.sass', 'meta.function.include.sass', 'punctuation.access.module.sass']
expect(tokens[3]).toEqual value: 'name', scopes: ['source.sass', 'meta.function.include.sass', 'variable.other.sass']
# @mixin declaration and its '=' shorthand, including a parameter ($p).
# NOTE(review): the '=' token is asserted with scope
# 'keyword.control.at-rule.keyframes.sass' — this mirrors the grammar's
# current output; the 'keyframes' name looks like a grammar quirk worth
# confirming upstream before changing the expectation.
it 'tokenizes @mixin or \'=\'', ->
{tokens} = grammar.tokenizeLine '@mixin mixin-name($p)'
expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'keyword.control.at-rule.mixin.sass', 'punctuation.definition.entity.sass']
expect(tokens[1]).toEqual value: 'mixin', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'keyword.control.at-rule.mixin.sass']
expect(tokens[3]).toEqual value: 'mixin-name', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'variable.other.sass']
expect(tokens[5]).toEqual value: '$', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']
expect(tokens[6]).toEqual value: 'p', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'meta.variable-usage.sass', 'variable.other.sass']
{tokens} = grammar.tokenizeLine '=mixin-name($p)'
expect(tokens[0]).toEqual value: '\=', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'keyword.control.at-rule.keyframes.sass']
expect(tokens[1]).toEqual value: 'mixin-name', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'variable.other.sass']
expect(tokens[3]).toEqual value: '$', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']
expect(tokens[4]).toEqual value: 'p', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'meta.variable-usage.sass', 'variable.other.sass']
# @content inside a mixin body: uses tokenizeLines, so tokens[1] addresses
# the second source line (the '@content' line).
it 'tokenizes @content', ->
tokens = grammar.tokenizeLines '''
@mixin mixin-name($p)
@content
'''
expect(tokens[1][1]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.content.sass', 'keyword.control.content.sass', 'punctuation.definition.entity.sass']
expect(tokens[1][2]).toEqual value: 'content', scopes: ['source.sass', 'meta.at-rule.content.sass', 'keyword.control.content.sass']
# @warn, @debug and @error: all three are asserted with the shared
# meta.at-rule.warn.sass / keyword.control.warn.sass scopes — the grammar
# does not distinguish them by scope, only by the keyword text.
it 'tokenizes @warn, @debug and @error', ->
{tokens} = grammar.tokenizeLine '@warn \'message\''
expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'keyword.control.warn.sass', 'punctuation.definition.entity.sass']
expect(tokens[1]).toEqual value: 'warn', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'keyword.control.warn.sass']
expect(tokens[4]).toEqual value: 'message', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'string.quoted.single.sass']
{tokens} = grammar.tokenizeLine '@debug \'message\''
expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'keyword.control.warn.sass', 'punctuation.definition.entity.sass']
expect(tokens[1]).toEqual value: 'debug', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'keyword.control.warn.sass']
expect(tokens[4]).toEqual value: 'message', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'string.quoted.single.sass']
{tokens} = grammar.tokenizeLine '@error \'message\''
expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'keyword.control.warn.sass', 'punctuation.definition.entity.sass']
expect(tokens[1]).toEqual value: 'error', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'keyword.control.warn.sass']
expect(tokens[4]).toEqual value: 'message', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'string.quoted.single.sass']
# @at-root nested under a selector: tokens[1] is the '@at-root' line,
# tokens[2] is the '#id' selector line that follows it.
it 'tokenizes @at-root', ->
tokens = grammar.tokenizeLines '''
.class
@at-root
#id
'''
expect(tokens[1][1]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.at-root.sass', 'keyword.control.at-root.sass', 'punctuation.definition.entity.sass']
expect(tokens[1][2]).toEqual value: 'at-root', scopes: ['source.sass', 'meta.at-rule.at-root.sass', 'keyword.control.at-root.sass']
expect(tokens[2][1]).toEqual value: '#', scopes: [ 'source.sass', 'meta.selector.css', 'entity.other.attribute-name.id.css.sass', 'punctuation.definition.entity.sass']
expect(tokens[2][2]).toEqual value: 'id', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.id.css.sass']
# @use module-loading at-rule: explicit namespace ('as name'), wildcard
# namespace ('as *') and configuration ('with (...)') forms.
describe '@use', ->
# 'as <identifier>' binds a custom namespace; the identifier scopes as
# variable.sass.
it 'tokenizes @use with explicit namespace correctly', ->
{tokens} = grammar.tokenizeLine "@use 'module' as _mod-ule"
expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.at-rule.use.sass', 'punctuation.definition.keyword.sass']
expect(tokens[1]).toEqual value: 'use', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.at-rule.use.sass']
expect(tokens[3]).toEqual value: "'", scopes: ['source.sass', 'meta.at-rule.use.sass', 'string.quoted.single.sass', 'punctuation.definition.string.begin.sass']
expect(tokens[4]).toEqual value: 'module', scopes: ['source.sass', 'meta.at-rule.use.sass', 'string.quoted.single.sass']
expect(tokens[7]).toEqual value: 'as', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.operator']
expect(tokens[9]).toEqual value: '_mod-ule', scopes: ['source.sass', 'meta.at-rule.use.sass', 'variable.sass']
# 'as *' imports without a namespace; '*' gets a dedicated
# variable.language scope.
it 'tokenizes @use with wildcard correctly', ->
{tokens} = grammar.tokenizeLine "@use 'module' as *;"
expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.at-rule.use.sass', 'punctuation.definition.keyword.sass']
expect(tokens[1]).toEqual value: 'use', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.at-rule.use.sass']
expect(tokens[3]).toEqual value: "'", scopes: ['source.sass', 'meta.at-rule.use.sass', 'string.quoted.single.sass', 'punctuation.definition.string.begin.sass']
expect(tokens[4]).toEqual value: 'module', scopes: ['source.sass', 'meta.at-rule.use.sass', 'string.quoted.single.sass']
expect(tokens[7]).toEqual value: 'as', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.operator']
expect(tokens[9]).toEqual value: '*', scopes: ['source.sass', 'meta.at-rule.use.sass', 'variable.language.expanded-namespace.sass']
# 'with (...)' passes configuration variables; only the opening bracket
# and the first variable ($black) are asserted.
it 'tokenizes @use with configuration correctly', ->
{tokens} = grammar.tokenizeLine "@use 'module' with ($black: #222, $border-radius: 0.1rem)"
expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.at-rule.use.sass', 'punctuation.definition.keyword.sass']
expect(tokens[1]).toEqual value: 'use', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.at-rule.use.sass']
expect(tokens[3]).toEqual value: "'", scopes: ['source.sass', 'meta.at-rule.use.sass', 'string.quoted.single.sass', 'punctuation.definition.string.begin.sass']
expect(tokens[4]).toEqual value: 'module', scopes: ['source.sass', 'meta.at-rule.use.sass', 'string.quoted.single.sass']
expect(tokens[7]).toEqual value: 'with', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.operator']
expect(tokens[9]).toEqual value: '(', scopes: ['source.sass', 'meta.at-rule.use.sass', 'punctuation.definition.module.begin.bracket.round.sass']
expect(tokens[10]).toEqual value: '$', scopes: ['source.sass', 'meta.at-rule.use.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']
expect(tokens[11]).toEqual value: 'black', scopes: ['source.sass', 'meta.at-rule.use.sass', 'meta.variable-usage.sass', 'variable.other.sass']
# @forward at-rule: bare form, with a module path, with an 'as prefix*'
# prefix, and with 'hide'/'show' member visibility lists.
describe '@forward', ->
it 'tokenizes solitary @forward correctly', ->
{tokens} = grammar.tokenizeLine '@forward'
expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass', 'punctuation.definition.keyword.sass']
expect(tokens[1]).toEqual value: 'forward', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass']
it 'tokenizes @forward with path correctly', ->
{tokens} = grammar.tokenizeLine "@forward 'module'"
expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass', 'punctuation.definition.keyword.sass']
expect(tokens[1]).toEqual value: 'forward', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass']
expect(tokens[3]).toEqual value: "'", scopes: ['source.sass', 'meta.at-rule.forward.sass', 'string.quoted.single.sass', 'punctuation.definition.string.begin.sass']
expect(tokens[4]).toEqual value: 'module', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'string.quoted.single.sass']
# 'as prefix*' — the prefix and the trailing wildcard are separate tokens.
it 'tokenizes @forward with prefix correctly', ->
{tokens} = grammar.tokenizeLine "@forward 'module' as prefix*"
expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass', 'punctuation.definition.keyword.sass']
expect(tokens[1]).toEqual value: 'forward', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass']
expect(tokens[3]).toEqual value: "'", scopes: ['source.sass', 'meta.at-rule.forward.sass', 'string.quoted.single.sass', 'punctuation.definition.string.begin.sass']
expect(tokens[4]).toEqual value: 'module', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'string.quoted.single.sass']
expect(tokens[7]).toEqual value: 'as', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.operator']
expect(tokens[9]).toEqual value: 'prefix', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'entity.other.attribute-name.module.sass']
expect(tokens[10]).toEqual value: '*', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'punctuation.definition.wildcard.sass']
# 'hide' list: mixin names scope as entity.name.function.sass, variables
# keep the usual $-sigil + variable.other.sass pair.
it 'tokenizes @forward with hide correctly', ->
{tokens} = grammar.tokenizeLine "@forward 'module' hide a-mixin $private-variable"
expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass', 'punctuation.definition.keyword.sass']
expect(tokens[1]).toEqual value: 'forward', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass']
expect(tokens[3]).toEqual value: "'", scopes: ['source.sass', 'meta.at-rule.forward.sass', 'string.quoted.single.sass', 'punctuation.definition.string.begin.sass']
expect(tokens[4]).toEqual value: 'module', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'string.quoted.single.sass']
expect(tokens[7]).toEqual value: 'hide', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.operator']
expect(tokens[9]).toEqual value: 'a-mixin', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'entity.name.function.sass']
expect(tokens[11]).toEqual value: '$', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']
expect(tokens[12]).toEqual value: 'private-variable', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'meta.variable-usage.sass', 'variable.other.sass']
# 'show' list: mirrors the 'hide' expectations with the opposite keyword.
it 'tokenizes @forward with show correctly', ->
{tokens} = grammar.tokenizeLine "@forward 'module' show public-mixin $public-variable"
expect(tokens[0]).toEqual value: '@', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass', 'punctuation.definition.keyword.sass']
expect(tokens[1]).toEqual value: 'forward', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass']
expect(tokens[3]).toEqual value: "'", scopes: ['source.sass', 'meta.at-rule.forward.sass', 'string.quoted.single.sass', 'punctuation.definition.string.begin.sass']
expect(tokens[4]).toEqual value: 'module', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'string.quoted.single.sass']
expect(tokens[7]).toEqual value: 'show', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.operator']
expect(tokens[9]).toEqual value: 'public-mixin', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'entity.name.function.sass']
expect(tokens[11]).toEqual value: '$', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']
expect(tokens[12]).toEqual value: 'public-variable', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'meta.variable-usage.sass', 'variable.other.sass']
# SassScript operators: comparison (== != > < >= <=), logical
# (and/or/not) and control-flow operators (from/through/to/in).
describe 'operators', ->
it 'correctly tokenizes comparison and logical operators', ->
{tokens} = grammar.tokenizeLine '@if 1 == 1'
expect(tokens[5]).toEqual value: '==', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.operator.comparison.sass']
{tokens} = grammar.tokenizeLine '@if 1 != 1'
expect(tokens[5]).toEqual value: '!=', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.operator.comparison.sass']
{tokens} = grammar.tokenizeLine '@if 1 > 1'
expect(tokens[5]).toEqual value: '>', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.operator.comparison.sass']
{tokens} = grammar.tokenizeLine '@if 1 < 1'
expect(tokens[5]).toEqual value: '<', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.operator.comparison.sass']
{tokens} = grammar.tokenizeLine '@if 1 >= 1'
expect(tokens[5]).toEqual value: '>=', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.operator.comparison.sass']
{tokens} = grammar.tokenizeLine '@if 1 <= 1'
expect(tokens[5]).toEqual value: '<=', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.operator.comparison.sass']
{tokens} = grammar.tokenizeLine '@if 1 == 1 and 2 == 2'
expect(tokens[9]).toEqual value: 'and', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.operator.logical.sass']
{tokens} = grammar.tokenizeLine '@if 1 == 1 or 2 == 2'
expect(tokens[9]).toEqual value: 'or', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.operator.logical.sass']
{tokens} = grammar.tokenizeLine '@if not 1 == 1'
expect(tokens[3]).toEqual value: 'not', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.operator.logical.sass']
# Covers the operators only referenced from the @for/@each tests above.
it 'correctly tokenizes control operators', ->
{tokens} = grammar.tokenizeLine '@for $i from 1 through 2'
expect(tokens[6]).toEqual value: 'from', scopes: ['source.sass', 'meta.at-rule.for.sass', 'keyword.operator.control.sass']
expect(tokens[10]).toEqual value: 'through', scopes: ['source.sass', 'meta.at-rule.for.sass', 'keyword.operator.control.sass']
{tokens} = grammar.tokenizeLine '@for $i from 1 to 2'
expect(tokens[10]).toEqual value: 'to', scopes: ['source.sass', 'meta.at-rule.for.sass', 'keyword.operator.control.sass']
{tokens} = grammar.tokenizeLine '@each $item in $list'
expect(tokens[6]).toEqual value: 'in', scopes: ['source.sass', 'meta.at-rule.each.sass', 'keyword.operator.control.sass']
# Namespaced member access in property values: module function calls
# (fonts.size(...)) and module variables (fonts.$size).
describe 'module usage syntax', ->
it 'correctly tokenizes module functions', ->
tokens = grammar.tokenizeLines '''
body
font-size: fonts.size(normal)
'''
expect(tokens[1][4]).toEqual value: 'fonts', scopes: ['source.sass', 'meta.property-value.sass', 'variable.sass']
expect(tokens[1][5]).toEqual value: '.', scopes: ['source.sass', 'meta.property-value.sass', 'punctuation.access.module.sass']
expect(tokens[1][6]).toEqual value: 'size', scopes: ['source.sass', 'meta.property-value.sass', 'support.function.misc.sass']
expect(tokens[1][7]).toEqual value: '(', scopes: ['source.sass', 'meta.property-value.sass', 'punctuation.section.function.sass']
expect(tokens[1][9]).toEqual value: ')', scopes: ['source.sass', 'meta.property-value.sass', 'punctuation.section.function.sass']
it 'correctly tokenizes module variables', ->
tokens = grammar.tokenizeLines '''
body
font-size: fonts.$size
'''
expect(tokens[1][4]).toEqual value: 'fonts', scopes: ['source.sass', 'meta.property-value.sass', 'meta.variable-usage.sass', 'variable.sass']
expect(tokens[1][5]).toEqual value: '.', scopes: ['source.sass', 'meta.property-value.sass', 'meta.variable-usage.sass', 'punctuation.access.module.sass']
expect(tokens[1][6]).toEqual value: '$', scopes: ['source.sass', 'meta.property-value.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']
expect(tokens[1][7]).toEqual value: 'size', scopes: ['source.sass', 'meta.property-value.sass', 'meta.variable-usage.sass', 'variable.other.sass']

View File

@ -0,0 +1,711 @@ (new file: JavaScript port of the CoffeeScript Sass grammar spec above)
describe('Sass grammar', function() {
// Shared grammar handle, populated asynchronously by the beforeEach below.
let grammar = null;
// Activates both language packages before each spec; language-css is
// loaded too because the sass grammar's scopes reference CSS scopes.
beforeEach(function() {
waitsForPromise(() => atom.packages.activatePackage('language-css'));
waitsForPromise(() => atom.packages.activatePackage('language-sass'));
runs(() => grammar = atom.grammars.grammarForScopeName('source.sass'));
});
// Smoke test: the grammar loaded and registered under 'source.sass'.
it('parses the grammar', function() {
expect(grammar).toBeTruthy();
expect(grammar.scopeName).toBe('source.sass');
});
// Vendor-prefixed names (-webkit-*) scope as vendored property names,
// not as unknown identifiers.
describe('vendor-prefixed properties', () => it('tokenizes them as properties', function() {
const tokens = grammar.tokenizeLines(`\
.something
-webkit-mask-repeat: no-repeat\
`
);
expect(tokens[1][1]).toEqual({value: '-webkit-mask-repeat', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.vendored.property-name.css']});
}));
// Property lists: name/value pairs in both standard ('color: x') and
// Sass's legacy colon-first (':color x') syntax, flat and nested.
describe('property-list', function() {
it('tokenizes the property-name and property-value', function() {
const tokens = grammar.tokenizeLines(`\
very-custom
color: inherit\
`
);
expect(tokens[1][1]).toEqual({value: 'color', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.property-name.css']});
expect(tokens[1][2]).toEqual({value: ':', scopes: ['source.sass', 'meta.property-value.sass', 'punctuation.separator.key-value.css']});
expect(tokens[1][3]).toEqual({value: ' ', scopes: ['source.sass', 'meta.property-value.sass']});
expect(tokens[1][4]).toEqual({value: 'inherit', scopes: ['source.sass', 'meta.property-value.sass', 'support.constant.property-value.css']});
});
// Same scopes must hold two indentation levels deep.
it('tokenizes nested property-lists', function() {
const tokens = grammar.tokenizeLines(`\
very-custom
very-very-custom
color: inherit
margin: top\
`
);
expect(tokens[2][1]).toEqual({value: 'color', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.property-name.css']});
expect(tokens[2][2]).toEqual({value: ':', scopes: ['source.sass', 'meta.property-value.sass', 'punctuation.separator.key-value.css']});
expect(tokens[2][3]).toEqual({value: ' ', scopes: ['source.sass', 'meta.property-value.sass']});
expect(tokens[2][4]).toEqual({value: 'inherit', scopes: ['source.sass', 'meta.property-value.sass', 'support.constant.property-value.css']});
expect(tokens[3][1]).toEqual({value: 'margin', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.property-name.css']});
expect(tokens[3][2]).toEqual({value: ':', scopes: ['source.sass', 'meta.property-value.sass', 'punctuation.separator.key-value.css']});
expect(tokens[3][3]).toEqual({value: ' ', scopes: ['source.sass', 'meta.property-value.sass']});
expect(tokens[3][4]).toEqual({value: 'top', scopes: ['source.sass', 'meta.property-value.sass', 'support.constant.property-value.css']});
});
// Colon-first syntax: ':' precedes the name and sits outside
// meta.property-name.sass.
it('tokenizes colon-first property-list syntax', function() {
const tokens = grammar.tokenizeLines(`\
very-custom
:color inherit\
`
);
expect(tokens[1][1]).toEqual({value: ':', scopes: ['source.sass', 'punctuation.separator.key-value.css']});
expect(tokens[1][2]).toEqual({value: 'color', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.property-name.css']});
expect(tokens[1][3]).toEqual({value: ' ', scopes: ['source.sass', 'meta.property-value.sass']});
expect(tokens[1][4]).toEqual({value: 'inherit', scopes: ['source.sass', 'meta.property-value.sass', 'support.constant.property-value.css']});
});
it('tokenizes nested colon-first property-list syntax', function() {
const tokens = grammar.tokenizeLines(`\
very-custom
very-very-custom
:color inherit
:margin top\
`
);
expect(tokens[2][1]).toEqual({value: ':', scopes: ['source.sass', 'punctuation.separator.key-value.css']});
expect(tokens[2][2]).toEqual({value: 'color', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.property-name.css']});
expect(tokens[2][3]).toEqual({value: ' ', scopes: ['source.sass', 'meta.property-value.sass']});
expect(tokens[2][4]).toEqual({value: 'inherit', scopes: ['source.sass', 'meta.property-value.sass', 'support.constant.property-value.css']});
expect(tokens[3][1]).toEqual({value: ':', scopes: ['source.sass', 'punctuation.separator.key-value.css']});
expect(tokens[3][2]).toEqual({value: 'margin', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.property-name.css']});
expect(tokens[3][3]).toEqual({value: ' ', scopes: ['source.sass', 'meta.property-value.sass']});
expect(tokens[3][4]).toEqual({value: 'top', scopes: ['source.sass', 'meta.property-value.sass', 'support.constant.property-value.css']});
});
});
// Pseudo-classes (:hover), pseudo-elements (::before) and functional
// pseudo-classes (:not(...)), at top level and nested one level deep.
describe('pseudo-classes and pseudo-elements', function() {
it('tokenizes pseudo-classes', function() {
const tokens = grammar.tokenizeLines(`\
a:hover
display: none\
`
);
expect(tokens[0][0]).toEqual({value: 'a', scopes: ['source.sass', 'meta.selector.css', 'entity.name.tag.css']});
expect(tokens[0][1]).toEqual({value: ':', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']});
expect(tokens[0][2]).toEqual({value: 'hover', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css']});
});
// '::' is a single punctuation token for pseudo-elements.
it('tokenizes pseudo-elements', function() {
const tokens = grammar.tokenizeLines(`\
a::before
display: none\
`
);
expect(tokens[0][0]).toEqual({value: 'a', scopes: ['source.sass', 'meta.selector.css', 'entity.name.tag.css']});
expect(tokens[0][1]).toEqual({value: '::', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css', 'punctuation.definition.entity.css']});
expect(tokens[0][2]).toEqual({value: 'before', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css']});
});
// Functional form: the parens and the class argument inside :not() each
// get their own tokens.
it('tokenizes functional pseudo-classes', function() {
const tokens = grammar.tokenizeLines(`\
&:not(.selected)
display: none\
`
);
expect(tokens[0][1]).toEqual({value: ':', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']});
expect(tokens[0][2]).toEqual({value: 'not', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css']});
expect(tokens[0][3]).toEqual({value: '(', scopes: ['source.sass', 'meta.selector.css', 'punctuation.section.function.begin.bracket.round.css']});
expect(tokens[0][4]).toEqual({value: '.', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']});
expect(tokens[0][5]).toEqual({value: 'selected', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.class.css']});
expect(tokens[0][6]).toEqual({value: ')', scopes: ['source.sass', 'meta.selector.css', 'punctuation.section.function.end.bracket.round.css']});
});
it('tokenizes nested pseudo-classes', function() {
const tokens = grammar.tokenizeLines(`\
body
a:hover
display: none\
`
);
expect(tokens[1][1]).toEqual({value: 'a', scopes: ['source.sass', 'meta.selector.css', 'entity.name.tag.css']});
expect(tokens[1][2]).toEqual({value: ':', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']});
expect(tokens[1][3]).toEqual({value: 'hover', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css']});
});
it('tokenizes nested pseudo-elements', function() {
const tokens = grammar.tokenizeLines(`\
body
a::before
display: none\
`
);
expect(tokens[1][1]).toEqual({value: 'a', scopes: ['source.sass', 'meta.selector.css', 'entity.name.tag.css']});
expect(tokens[1][2]).toEqual({value: '::', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css', 'punctuation.definition.entity.css']});
expect(tokens[1][3]).toEqual({value: 'before', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css']});
});
});
// Attribute selectors ([name="value"]): bracket punctuation, attribute
// name, '=' operator and the quoted value, single and back-to-back.
describe('selectors', () => describe('attribute selectors', function() {
it('tokenizes a single attribute selector', function() {
const {tokens} = grammar.tokenizeLine('[something="1"]');
expect(tokens[0]).toEqual({value: '[', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'punctuation.definition.attribute-selector.begin.bracket.square.sass']});
expect(tokens[1]).toEqual({value: 'something', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'entity.other.attribute-name.attribute.sass']});
expect(tokens[2]).toEqual({value: '=', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'keyword.operator.sass']});
expect(tokens[3]).toEqual({value: '"', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass', 'punctuation.definition.string.begin.sass']});
expect(tokens[4]).toEqual({value: '1', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass']});
expect(tokens[5]).toEqual({value: '"', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass', 'punctuation.definition.string.end.sass']});
expect(tokens[6]).toEqual({value: ']', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'punctuation.definition.attribute-selector.end.bracket.square.sass']});
});
// Two adjacent selectors must each open/close cleanly (tokens 0-6 and 7-13).
it("tokenizes multiple attribute selectors", function() {
const {tokens} = grammar.tokenizeLine('[data-name="text-color"][data-value="null"]');
expect(tokens[0]).toEqual({value: '[', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'punctuation.definition.attribute-selector.begin.bracket.square.sass']});
expect(tokens[1]).toEqual({value: 'data-name', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'entity.other.attribute-name.attribute.sass']});
expect(tokens[2]).toEqual({value: '=', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'keyword.operator.sass']});
expect(tokens[3]).toEqual({value: '"', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass', 'punctuation.definition.string.begin.sass']});
expect(tokens[4]).toEqual({value: 'text-color', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass']});
expect(tokens[5]).toEqual({value: '"', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass', 'punctuation.definition.string.end.sass']});
expect(tokens[6]).toEqual({value: ']', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'punctuation.definition.attribute-selector.end.bracket.square.sass']});
expect(tokens[7]).toEqual({value: '[', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'punctuation.definition.attribute-selector.begin.bracket.square.sass']});
expect(tokens[8]).toEqual({value: 'data-value', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'entity.other.attribute-name.attribute.sass']});
expect(tokens[9]).toEqual({value: '=', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'keyword.operator.sass']});
expect(tokens[10]).toEqual({value: '"', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass', 'punctuation.definition.string.begin.sass']});
expect(tokens[11]).toEqual({value: 'null', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass']});
expect(tokens[12]).toEqual({value: '"', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'string.quoted.double.attribute-value.sass', 'punctuation.definition.string.end.sass']});
expect(tokens[13]).toEqual({value: ']', scopes: ['source.sass', 'meta.selector.css', 'meta.attribute-selector.css.sass', 'punctuation.definition.attribute-selector.end.bracket.square.sass']});
});
}));
// Numeric values: a plain percentage, a signed literal ('+50' is one
// numeric token), and a binary '-' operator between two values.
describe('numbers', function() {
it('tokenizes them', function() {
const tokens = grammar.tokenizeLines(`\
.something
top: 50%\
`
);
expect(tokens[1][4]).toEqual({value: '50', scopes: ['source.sass', 'meta.property-value.sass', 'constant.numeric.css']});
});
it('tokenizes number operations', function() {
let tokens = grammar.tokenizeLines(`\
.something
top: +50%\
`
);
expect(tokens[1][4]).toEqual({value: '+50', scopes: ['source.sass', 'meta.property-value.sass', 'constant.numeric.css']});
tokens = grammar.tokenizeLines(`\
.something
top: 50% - 30%\
`
);
expect(tokens[1][7]).toEqual({value: '-', scopes: ['source.sass', 'meta.property-value.sass', 'keyword.operator.css']});
});
});
// Variable declarations ('$name: value'): the '$' sigil, the variable name,
// the ':' separator and the value each get their own token/scope. The same
// scopes are expected whether or not the declaration is indented.
describe('variables', function() {
it('tokenizes them', function() {
const {tokens} = grammar.tokenizeLine('$test: bla');
expect(tokens[0]).toEqual({value: '$', scopes: ['source.sass', 'meta.variable-declaration.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1]).toEqual({value: 'test', scopes: ['source.sass', 'meta.variable-declaration.sass', 'variable.other.sass']});
expect(tokens[2]).toEqual({value: ':', scopes: ['source.sass', 'meta.variable-declaration.sass', 'meta.property-value.sass', 'punctuation.separator.key-value.css']});
expect(tokens[3]).toEqual({value: ' ', scopes: ['source.sass', 'meta.variable-declaration.sass', 'meta.property-value.sass']});
expect(tokens[4]).toEqual({value: 'bla', scopes: ['source.sass', 'meta.variable-declaration.sass', 'meta.property-value.sass']});
});
it('tokenizes indented variables', function() {
// Leading whitespace becomes tokens[0], shifting every index up by one.
const {tokens} = grammar.tokenizeLine('  $test: bla');
expect(tokens[1]).toEqual({value: '$', scopes: ['source.sass', 'meta.variable-declaration.sass', 'punctuation.definition.entity.sass']});
expect(tokens[2]).toEqual({value: 'test', scopes: ['source.sass', 'meta.variable-declaration.sass', 'variable.other.sass']});
expect(tokens[3]).toEqual({value: ':', scopes: ['source.sass', 'meta.variable-declaration.sass', 'meta.property-value.sass', 'punctuation.separator.key-value.css']});
expect(tokens[4]).toEqual({value: ' ', scopes: ['source.sass', 'meta.variable-declaration.sass', 'meta.property-value.sass']});
expect(tokens[5]).toEqual({value: 'bla', scopes: ['source.sass', 'meta.variable-declaration.sass', 'meta.property-value.sass']});
});
});
// Quoted strings in property values: begin/end quote punctuation scopes wrap
// the string body, for both single and double quotes; backslash escapes
// inside either quote style get constant.character.escape.sass.
describe('strings', function() {
it('tokenizes single-quote strings', function() {
const tokens = grammar.tokenizeLines(`\
.a
content: 'hi'\
`
);
expect(tokens[1][4]).toEqual({value: "'", scopes: ['source.sass', 'meta.property-value.sass', 'string.quoted.single.sass', 'punctuation.definition.string.begin.sass']});
expect(tokens[1][5]).toEqual({value: 'hi', scopes: ['source.sass', 'meta.property-value.sass', 'string.quoted.single.sass']});
expect(tokens[1][6]).toEqual({value: "'", scopes: ['source.sass', 'meta.property-value.sass', 'string.quoted.single.sass', 'punctuation.definition.string.end.sass']});
});
it('tokenizes double-quote strings', function() {
const tokens = grammar.tokenizeLines(`\
.a
content: "hi"\
`
);
expect(tokens[1][4]).toEqual({value: '"', scopes: ['source.sass', 'meta.property-value.sass', 'string.quoted.double.sass', 'punctuation.definition.string.begin.sass']});
expect(tokens[1][5]).toEqual({value: 'hi', scopes: ['source.sass', 'meta.property-value.sass', 'string.quoted.double.sass']});
expect(tokens[1][6]).toEqual({value: '"', scopes: ['source.sass', 'meta.property-value.sass', 'string.quoted.double.sass', 'punctuation.definition.string.end.sass']});
});
it('tokenizes escape characters', function() {
// '\\abcdef' in the template literal is a literal backslash escape in the
// tokenized Sass source; the whole escape is one token.
let tokens = grammar.tokenizeLines(`\
.a
content: '\\abcdef'\
`
);
expect(tokens[1][5]).toEqual({value: '\\abcdef', scopes: ['source.sass', 'meta.property-value.sass', 'string.quoted.single.sass', 'constant.character.escape.sass']});
tokens = grammar.tokenizeLines(`\
.a
content: "\\abcdef"\
`
);
expect(tokens[1][5]).toEqual({value: '\\abcdef', scopes: ['source.sass', 'meta.property-value.sass', 'string.quoted.double.sass', 'constant.character.escape.sass']});
});
});
// Sass comments are only valid when they start a line (after optional
// indentation); trailing '//' or '/* */' after other content is flagged
// invalid.illegal.sass. Block and line comments extend to following lines
// based on indentation: deeper-indented lines stay inside the comment, and
// the first line back at the original indent leaves it.
describe('comments', function() {
it('only tokenizes comments that start at the beginning of a line', function() {
let {tokens} = grammar.tokenizeLine('   //A comment?');
expect(tokens[1]).toEqual({value: '//', scopes: ['source.sass', 'comment.line.sass', 'punctuation.definition.comment.sass']});
expect(tokens[2]).toEqual({value: 'A comment?', scopes: ['source.sass', 'comment.line.sass']});
({tokens} = grammar.tokenizeLine('/* also a comment */'));
expect(tokens[0]).toEqual({value: '/*', scopes: ['source.sass', 'comment.block.sass', 'punctuation.definition.comment.sass']});
expect(tokens[1]).toEqual({value: ' also a comment ', scopes: ['source.sass', 'comment.block.sass']});
expect(tokens[2]).toEqual({value: '*/', scopes: ['source.sass', 'comment.block.sass', 'punctuation.definition.comment.sass']});
// Comment markers after selector text are not comments in Sass.
({tokens} = grammar.tokenizeLine('this //is not a comment'));
expect(tokens[1]).toEqual({value: '//is not a comment', scopes: ['source.sass', 'meta.selector.css', 'invalid.illegal.sass']});
({tokens} = grammar.tokenizeLine('this /* is also not a comment */'));
expect(tokens[1]).toEqual({value: '/* is also not a comment */', scopes: ['source.sass', 'meta.selector.css', 'invalid.illegal.sass']});
});
it('correctly tokenizes block comments based on indentation', function() {
const tokens = grammar.tokenizeLines(`\
/* hi1
  hi2
    hi3
hi4\
`
);
expect(tokens[0][0]).toEqual({value: '/*', scopes: ['source.sass', 'comment.block.sass', 'punctuation.definition.comment.sass']});
expect(tokens[0][1]).toEqual({value: ' hi1', scopes: ['source.sass', 'comment.block.sass']});
expect(tokens[1][0]).toEqual({value: '  hi2', scopes: ['source.sass', 'comment.block.sass']});
expect(tokens[2][0]).toEqual({value: '    hi3', scopes: ['source.sass', 'comment.block.sass']});
// 'hi4' is back at column 0, so the comment has ended.
expect(tokens[3][0]).toEqual({value: 'hi4', scopes: ['source.sass', 'meta.selector.css']});
});
it('correctly tokenizes line comments based on indentation', function() {
const tokens = grammar.tokenizeLines(`\
// hi1
  hi2
    hi3
hi4\
`
);
expect(tokens[0][0]).toEqual({value: '//', scopes: ['source.sass', 'comment.line.sass', 'punctuation.definition.comment.sass']});
expect(tokens[0][1]).toEqual({value: ' hi1', scopes: ['source.sass', 'comment.line.sass']});
expect(tokens[1][0]).toEqual({value: '  hi2', scopes: ['source.sass', 'comment.line.sass']});
expect(tokens[2][0]).toEqual({value: '    hi3', scopes: ['source.sass', 'comment.line.sass']});
expect(tokens[3][0]).toEqual({value: 'hi4', scopes: ['source.sass', 'meta.selector.css']});
});
});
describe('at-rules and directives', function() {
// @media: the '@' punctuation and 'media' keyword, a bare media type
// (support.constant.media.css), and parenthesized media-query feature lists
// with their own name/value/bracket scopes joined by logical 'and'.
it('tokenizes @media', function() {
let tokens = grammar.tokenizeLines(`\
@media screen
  background: none\
`
);
expect(tokens[0][0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.media.sass', 'keyword.control.at-rule.media.sass', 'punctuation.definition.keyword.sass']});
expect(tokens[0][1]).toEqual({value: 'media', scopes: ['source.sass', 'meta.at-rule.media.sass', 'keyword.control.at-rule.media.sass']});
expect(tokens[0][2]).toEqual({value: ' ', scopes: ['source.sass', 'meta.at-rule.media.sass']});
expect(tokens[0][3]).toEqual({value: 'screen', scopes: ['source.sass', 'meta.at-rule.media.sass', 'support.constant.media.css']});
// The indented line under @media is an ordinary property declaration.
expect(tokens[1][1]).toEqual({value: 'background', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.property-name.css']});
tokens = grammar.tokenizeLines(`\
@media (orientation: landscape) and (min-width: 700px)
  background: none\
`
);
expect(tokens[0][0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.media.sass', 'keyword.control.at-rule.media.sass', 'punctuation.definition.keyword.sass']});
expect(tokens[0][1]).toEqual({value: 'media', scopes: ['source.sass', 'meta.at-rule.media.sass', 'keyword.control.at-rule.media.sass']});
expect(tokens[0][2]).toEqual({value: ' ', scopes: ['source.sass', 'meta.at-rule.media.sass']});
expect(tokens[0][3]).toEqual({value: '(', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'punctuation.definition.media-query.begin.bracket.round.sass']});
expect(tokens[0][4]).toEqual({value: 'orientation', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'meta.property-name.media-query.sass', 'support.type.property-name.media.css']});
expect(tokens[0][5]).toEqual({value: ':', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'punctuation.separator.key-value.sass']});
expect(tokens[0][6]).toEqual({value: ' ', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass']});
expect(tokens[0][7]).toEqual({value: 'landscape', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'meta.property-value.media-query.sass', 'support.constant.property-value.css']});
expect(tokens[0][8]).toEqual({value: ')', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'punctuation.definition.media-query.end.bracket.round.sass']});
expect(tokens[0][9]).toEqual({value: ' ', scopes: ['source.sass', 'meta.at-rule.media.sass']});
expect(tokens[0][10]).toEqual({value: 'and', scopes: ['source.sass', 'meta.at-rule.media.sass', 'keyword.operator.logical.sass']});
expect(tokens[0][12]).toEqual({value: '(', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'punctuation.definition.media-query.begin.bracket.round.sass']});
expect(tokens[0][13]).toEqual({value: 'min-width', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'meta.property-name.media-query.sass', 'support.type.property-name.media.css']});
expect(tokens[0][14]).toEqual({value: ':', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'punctuation.separator.key-value.sass']});
// '700' and its unit 'px' are separate tokens; 'px' nests a unit scope.
expect(tokens[0][16]).toEqual({value: '700', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'meta.property-value.media-query.sass', 'constant.numeric.css']});
expect(tokens[0][17]).toEqual({value: 'px', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'meta.property-value.media-query.sass', 'constant.numeric.css', 'keyword.other.unit.px.css']});
expect(tokens[0][18]).toEqual({value: ')', scopes: ['source.sass', 'meta.at-rule.media.sass', 'meta.property-list.media-query.sass', 'punctuation.definition.media-query.end.bracket.round.sass']});
expect(tokens[1][1]).toEqual({value: 'background', scopes: ['source.sass', 'meta.property-name.sass', 'support.type.property-name.css']});
});
// @function: keyword, function name (support.function.misc.sass), and each
// '$param' in the argument list as a variable-usage pair ('$' + name).
it('tokenizes @function', function() {
const {tokens} = grammar.tokenizeLine('@function function_name($p1, $p2)');
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.function.sass', 'keyword.control.at-rule.function.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1]).toEqual({value: 'function', scopes: ['source.sass', 'meta.at-rule.function.sass', 'keyword.control.at-rule.function.sass']});
expect(tokens[3]).toEqual({value: 'function_name', scopes: ['source.sass', 'meta.at-rule.function.sass', 'support.function.misc.sass']});
expect(tokens[5]).toEqual({value: '$', scopes: ['source.sass', 'meta.at-rule.function.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']});
expect(tokens[6]).toEqual({value: 'p1', scopes: ['source.sass', 'meta.at-rule.function.sass', 'meta.variable-usage.sass', 'variable.other.sass']});
expect(tokens[8]).toEqual({value: '$', scopes: ['source.sass', 'meta.at-rule.function.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']});
expect(tokens[9]).toEqual({value: 'p2', scopes: ['source.sass', 'meta.at-rule.function.sass', 'meta.variable-usage.sass', 'variable.other.sass']});
});
// @return: keyword scopes plus string contents and the '+' concatenation
// operator inside the returned expression.
it('tokenizes @return', function() {
const {tokens} = grammar.tokenizeLine('@return \'border\' + \' \' + \'1px solid pink\'');
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.return.sass', 'keyword.control.return.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1]).toEqual({value: 'return', scopes: ['source.sass', 'meta.at-rule.return.sass', 'keyword.control.return.sass']});
expect(tokens[4]).toEqual({value: 'border', scopes: ['source.sass', 'meta.at-rule.return.sass', 'string.quoted.single.sass']});
expect(tokens[7]).toEqual({value: '+', scopes: ['source.sass', 'meta.at-rule.return.sass', 'keyword.operator.css']});
});
// @if: plain '$var' usages and module-namespaced 'config.$setting' usages
// (namespace, '.' module accessor, '$', name), plus '==' comparison and the
// 'true' constant.
it('tokenizes @if', function() {
let {tokens} = grammar.tokenizeLine('@if $var == true');
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.control.if.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1]).toEqual({value: 'if', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.control.if.sass']});
expect(tokens[3]).toEqual({value: '$', scopes: ['source.sass', 'meta.at-rule.if.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']});
expect(tokens[4]).toEqual({value: 'var', scopes: ['source.sass', 'meta.at-rule.if.sass', 'meta.variable-usage.sass', 'variable.other.sass']});
expect(tokens[6]).toEqual({value: '==', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.operator.comparison.sass']});
expect(tokens[8]).toEqual({value: 'true', scopes: ['source.sass', 'meta.at-rule.if.sass', 'support.constant.property-value.css.sass']});
// Namespaced variable: the extra 'config' and '.' tokens shift indices by 2.
({tokens} = grammar.tokenizeLine('@if config.$setting == true'));
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.control.if.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1]).toEqual({value: 'if', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.control.if.sass']});
expect(tokens[3]).toEqual({value: 'config', scopes: ['source.sass', 'meta.at-rule.if.sass', 'meta.variable-usage.sass', 'variable.sass']});
expect(tokens[4]).toEqual({value: '.', scopes: ['source.sass', 'meta.at-rule.if.sass', 'meta.variable-usage.sass', 'punctuation.access.module.sass']});
expect(tokens[5]).toEqual({value: '$', scopes: ['source.sass', 'meta.at-rule.if.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']});
expect(tokens[6]).toEqual({value: 'setting', scopes: ['source.sass', 'meta.at-rule.if.sass', 'meta.variable-usage.sass', 'variable.other.sass']});
expect(tokens[8]).toEqual({value: '==', scopes: ['source.sass', 'meta.at-rule.if.sass', 'keyword.operator.comparison.sass']});
expect(tokens[10]).toEqual({value: 'true', scopes: ['source.sass', 'meta.at-rule.if.sass', 'support.constant.property-value.css.sass']});
});
// @else if: 'else if ' (trailing space included) is a single keyword token,
// then the same variable-usage / comparison / constant scopes as @if,
// for both plain and module-namespaced variables.
it('tokenizes @else if', function() {
let {tokens} = grammar.tokenizeLine('@else if $var == false');
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.else.sass', 'keyword.control.else.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1]).toEqual({value: 'else if ', scopes: ['source.sass', 'meta.at-rule.else.sass', 'keyword.control.else.sass']});
expect(tokens[2]).toEqual({value: '$', scopes: ['source.sass', 'meta.at-rule.else.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']});
expect(tokens[3]).toEqual({value: 'var', scopes: ['source.sass', 'meta.at-rule.else.sass', 'meta.variable-usage.sass', 'variable.other.sass']});
expect(tokens[5]).toEqual({value: '==', scopes: ['source.sass', 'meta.at-rule.else.sass', 'keyword.operator.comparison.sass']});
expect(tokens[7]).toEqual({value: 'false', scopes: ['source.sass', 'meta.at-rule.else.sass', 'support.constant.property-value.css.sass']});
({tokens} = grammar.tokenizeLine('@else if config.$setting == false'));
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.else.sass', 'keyword.control.else.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1]).toEqual({value: 'else if ', scopes: ['source.sass', 'meta.at-rule.else.sass', 'keyword.control.else.sass']});
expect(tokens[2]).toEqual({value: 'config', scopes: ['source.sass', 'meta.at-rule.else.sass', 'meta.variable-usage.sass', 'variable.sass']});
expect(tokens[3]).toEqual({value: '.', scopes: ['source.sass', 'meta.at-rule.else.sass', 'meta.variable-usage.sass', 'punctuation.access.module.sass']});
expect(tokens[4]).toEqual({value: '$', scopes: ['source.sass', 'meta.at-rule.else.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']});
expect(tokens[5]).toEqual({value: 'setting', scopes: ['source.sass', 'meta.at-rule.else.sass', 'meta.variable-usage.sass', 'variable.other.sass']});
expect(tokens[7]).toEqual({value: '==', scopes: ['source.sass', 'meta.at-rule.else.sass', 'keyword.operator.comparison.sass']});
expect(tokens[9]).toEqual({value: 'false', scopes: ['source.sass', 'meta.at-rule.else.sass', 'support.constant.property-value.css.sass']});
});
// @while: keyword scopes plus a numeric condition token.
it('tokenizes @while', function() {
const {tokens} = grammar.tokenizeLine('@while 1');
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.while.sass', 'keyword.control.while.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1]).toEqual({value: 'while', scopes: ['source.sass', 'meta.at-rule.while.sass', 'keyword.control.while.sass']});
expect(tokens[3]).toEqual({value: '1', scopes: ['source.sass', 'meta.at-rule.while.sass', 'constant.numeric.css']});
});
// @for: loop variable usage and the two numeric bounds; the 'from'/'through'
// keywords themselves are covered in the operators specs (see note below).
it('tokenizes @for', function() {
const {tokens} = grammar.tokenizeLine('@for $i from 1 through 100');
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.for.sass', 'keyword.control.for.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1]).toEqual({value: 'for', scopes: ['source.sass', 'meta.at-rule.for.sass', 'keyword.control.for.sass']});
expect(tokens[3]).toEqual({value: '$', scopes: ['source.sass', 'meta.at-rule.for.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']});
expect(tokens[4]).toEqual({value: 'i', scopes: ['source.sass', 'meta.at-rule.for.sass', 'meta.variable-usage.sass', 'variable.other.sass']});
expect(tokens[8]).toEqual({value: '1', scopes: ['source.sass', 'meta.at-rule.for.sass', 'constant.numeric.css']});
expect(tokens[12]).toEqual({value: '100', scopes: ['source.sass', 'meta.at-rule.for.sass', 'constant.numeric.css']});
});
// 'from' and 'through' tested in operators
// @each: loop variable and list variable usages, with both a plain '$list'
// and a module-namespaced 'module.$list' (extra namespace and '.' tokens
// shift the indices).
it('tokenizes @each', function() {
let {tokens} = grammar.tokenizeLine('@each $item in $list');
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.each.sass', 'keyword.control.each.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1]).toEqual({value: 'each', scopes: ['source.sass', 'meta.at-rule.each.sass', 'keyword.control.each.sass']});
expect(tokens[3]).toEqual({value: '$', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']});
expect(tokens[4]).toEqual({value: 'item', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'variable.other.sass']});
expect(tokens[8]).toEqual({value: '$', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']});
expect(tokens[9]).toEqual({value: 'list', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'variable.other.sass']});
({tokens} = grammar.tokenizeLine('@each $item in module.$list'));
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.each.sass', 'keyword.control.each.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1]).toEqual({value: 'each', scopes: ['source.sass', 'meta.at-rule.each.sass', 'keyword.control.each.sass']});
expect(tokens[3]).toEqual({value: '$', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']});
expect(tokens[4]).toEqual({value: 'item', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'variable.other.sass']});
expect(tokens[8]).toEqual({value: 'module', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'variable.sass']});
expect(tokens[9]).toEqual({value: '.', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'punctuation.access.module.sass']});
expect(tokens[10]).toEqual({value: '$', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']});
expect(tokens[11]).toEqual({value: 'list', scopes: ['source.sass', 'meta.at-rule.each.sass', 'meta.variable-usage.sass', 'variable.other.sass']});
});
// 'in' tested in operators
// @include: a plain mixin name is variable.other.sass; a namespaced
// 'mixin.name' splits into namespace (variable.sass), module '.' accessor,
// and member name.
it('tokenizes @include', function() {
let {tokens} = grammar.tokenizeLine('@include mixin-name');
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.function.include.sass', 'keyword.control.at-rule.include.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1]).toEqual({value: 'include', scopes: ['source.sass', 'meta.function.include.sass', 'keyword.control.at-rule.include.sass']});
expect(tokens[3]).toEqual({value: 'mixin-name', scopes: ['source.sass', 'meta.function.include.sass', 'variable.other.sass']});
({tokens} = grammar.tokenizeLine('@include mixin.name'));
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.function.include.sass', 'keyword.control.at-rule.include.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1]).toEqual({value: 'include', scopes: ['source.sass', 'meta.function.include.sass', 'keyword.control.at-rule.include.sass']});
expect(tokens[3]).toEqual({value: 'mixin', scopes: ['source.sass', 'meta.function.include.sass', 'variable.sass']});
expect(tokens[4]).toEqual({value: '.', scopes: ['source.sass', 'meta.function.include.sass', 'punctuation.access.module.sass']});
expect(tokens[5]).toEqual({value: 'name', scopes: ['source.sass', 'meta.function.include.sass', 'variable.other.sass']});
});
// '+' is the indented-syntax shorthand for @include; it gets the same
// keyword.control.at-rule.include.sass scope as the spelled-out form above.
it('tokenizes \'+\'', function() {
let {tokens} = grammar.tokenizeLine('+mixin-name');
expect(tokens[0]).toEqual({value: '+', scopes: ['source.sass', 'meta.function.include.sass', 'keyword.control.at-rule.include.sass']});
expect(tokens[1]).toEqual({value: 'mixin-name', scopes: ['source.sass', 'meta.function.include.sass', 'variable.other.sass']});
({tokens} = grammar.tokenizeLine('+mixin.name'));
expect(tokens[0]).toEqual({value: '+', scopes: ['source.sass', 'meta.function.include.sass', 'keyword.control.at-rule.include.sass']});
expect(tokens[1]).toEqual({value: 'mixin', scopes: ['source.sass', 'meta.function.include.sass', 'variable.sass']});
expect(tokens[2]).toEqual({value: '.', scopes: ['source.sass', 'meta.function.include.sass', 'punctuation.access.module.sass']});
expect(tokens[3]).toEqual({value: 'name', scopes: ['source.sass', 'meta.function.include.sass', 'variable.other.sass']});
});
// '@mixin name(args)' and the indented-syntax shorthand '=name(args)' both
// declare a mixin: keyword token, mixin name (variable.other.sass), and
// each '$param' as a variable-usage pair.
it('tokenizes @mixin or \'=\'', function() {
let {tokens} = grammar.tokenizeLine('@mixin mixin-name($p)');
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'keyword.control.at-rule.mixin.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1]).toEqual({value: 'mixin', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'keyword.control.at-rule.mixin.sass']});
expect(tokens[3]).toEqual({value: 'mixin-name', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'variable.other.sass']});
expect(tokens[5]).toEqual({value: '$', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']});
expect(tokens[6]).toEqual({value: 'p', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'meta.variable-usage.sass', 'variable.other.sass']});
({tokens} = grammar.tokenizeLine('=mixin-name($p)'));
// Was '\=' — '=' needs no escaping in a single-quoted JS string; the escape
// evaluated to the same one-character string and only obscured the intent.
// NOTE(review): the grammar scopes the '=' shorthand as
// keyword.control.at-rule.keyframes.sass, which looks like a misnamed scope
// in the grammar itself; this spec just pins the current behavior.
expect(tokens[0]).toEqual({value: '=', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'keyword.control.at-rule.keyframes.sass']});
expect(tokens[1]).toEqual({value: 'mixin-name', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'variable.other.sass']});
expect(tokens[3]).toEqual({value: '$', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']});
expect(tokens[4]).toEqual({value: 'p', scopes: ['source.sass', 'meta.variable-declaration.sass.mixin', 'meta.variable-usage.sass', 'variable.other.sass']});
});
// @content inside a @mixin body: checks only the '@' punctuation and
// 'content' keyword tokens on the indented second line.
it('tokenizes @content', function() {
const tokens = grammar.tokenizeLines(`\
@mixin mixin-name($p)
  @content\
`
);
expect(tokens[1][1]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.content.sass', 'keyword.control.content.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1][2]).toEqual({value: 'content', scopes: ['source.sass', 'meta.at-rule.content.sass', 'keyword.control.content.sass']});
});
// @warn, @debug and @error all share the meta.at-rule.warn.sass /
// keyword.control.warn.sass scopes; only the keyword text differs, and the
// quoted message body is asserted for each.
it('tokenizes @warn, @debug and @error', function() {
let {tokens} = grammar.tokenizeLine('@warn \'message\'');
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'keyword.control.warn.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1]).toEqual({value: 'warn', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'keyword.control.warn.sass']});
expect(tokens[4]).toEqual({value: 'message', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'string.quoted.single.sass']});
({tokens} = grammar.tokenizeLine('@debug \'message\''));
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'keyword.control.warn.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1]).toEqual({value: 'debug', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'keyword.control.warn.sass']});
expect(tokens[4]).toEqual({value: 'message', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'string.quoted.single.sass']});
({tokens} = grammar.tokenizeLine('@error \'message\''));
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'keyword.control.warn.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1]).toEqual({value: 'error', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'keyword.control.warn.sass']});
expect(tokens[4]).toEqual({value: 'message', scopes: ['source.sass', 'meta.at-rule.warn.sass', 'string.quoted.single.sass']});
});
// @at-root nested under a selector: the directive's keyword tokens on the
// second line, and the id selector under it still tokenized as a normal
// selector ('#' punctuation + id name).
it('tokenizes @at-root', function() {
const tokens = grammar.tokenizeLines(`\
.class
  @at-root
    #id\
`
);
expect(tokens[1][1]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.at-root.sass', 'keyword.control.at-root.sass', 'punctuation.definition.entity.sass']});
expect(tokens[1][2]).toEqual({value: 'at-root', scopes: ['source.sass', 'meta.at-rule.at-root.sass', 'keyword.control.at-root.sass']});
expect(tokens[2][1]).toEqual({value: '#', scopes: [ 'source.sass', 'meta.selector.css', 'entity.other.attribute-name.id.css.sass', 'punctuation.definition.entity.sass']});
expect(tokens[2][2]).toEqual({value: 'id', scopes: ['source.sass', 'meta.selector.css', 'entity.other.attribute-name.id.css.sass']});
});
// @use: the quoted module path, then the 'as <namespace>' form (explicit
// identifier or '*' wildcard) and the 'with (...)' configuration form with
// its bracket and '$variable' tokens.
describe('@use', function() {
it('tokenizes @use with explicit namespace correctly', function() {
const {tokens} = grammar.tokenizeLine("@use 'module' as _mod-ule");
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.at-rule.use.sass', 'punctuation.definition.keyword.sass']});
expect(tokens[1]).toEqual({value: 'use', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.at-rule.use.sass']});
expect(tokens[3]).toEqual({value: "'", scopes: ['source.sass', 'meta.at-rule.use.sass', 'string.quoted.single.sass', 'punctuation.definition.string.begin.sass']});
expect(tokens[4]).toEqual({value: 'module', scopes: ['source.sass', 'meta.at-rule.use.sass', 'string.quoted.single.sass']});
expect(tokens[7]).toEqual({value: 'as', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.operator']});
expect(tokens[9]).toEqual({value: '_mod-ule', scopes: ['source.sass', 'meta.at-rule.use.sass', 'variable.sass']});
});
it('tokenizes @use with wildcard correctly', function() {
const {tokens} = grammar.tokenizeLine("@use 'module' as *;");
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.at-rule.use.sass', 'punctuation.definition.keyword.sass']});
expect(tokens[1]).toEqual({value: 'use', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.at-rule.use.sass']});
expect(tokens[3]).toEqual({value: "'", scopes: ['source.sass', 'meta.at-rule.use.sass', 'string.quoted.single.sass', 'punctuation.definition.string.begin.sass']});
expect(tokens[4]).toEqual({value: 'module', scopes: ['source.sass', 'meta.at-rule.use.sass', 'string.quoted.single.sass']});
expect(tokens[7]).toEqual({value: 'as', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.operator']});
// '*' imports the module's members into the global namespace.
expect(tokens[9]).toEqual({value: '*', scopes: ['source.sass', 'meta.at-rule.use.sass', 'variable.language.expanded-namespace.sass']});
});
it('tokenizes @use with configuration correctly', function() {
const {tokens} = grammar.tokenizeLine("@use 'module' with ($black: #222, $border-radius: 0.1rem)");
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.at-rule.use.sass', 'punctuation.definition.keyword.sass']});
expect(tokens[1]).toEqual({value: 'use', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.at-rule.use.sass']});
expect(tokens[3]).toEqual({value: "'", scopes: ['source.sass', 'meta.at-rule.use.sass', 'string.quoted.single.sass', 'punctuation.definition.string.begin.sass']});
expect(tokens[4]).toEqual({value: 'module', scopes: ['source.sass', 'meta.at-rule.use.sass', 'string.quoted.single.sass']});
expect(tokens[7]).toEqual({value: 'with', scopes: ['source.sass', 'meta.at-rule.use.sass', 'keyword.control.operator']});
expect(tokens[9]).toEqual({value: '(', scopes: ['source.sass', 'meta.at-rule.use.sass', 'punctuation.definition.module.begin.bracket.round.sass']});
expect(tokens[10]).toEqual({value: '$', scopes: ['source.sass', 'meta.at-rule.use.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']});
expect(tokens[11]).toEqual({value: 'black', scopes: ['source.sass', 'meta.at-rule.use.sass', 'meta.variable-usage.sass', 'variable.other.sass']});
});
});
describe('@forward', function() {
// Bare '@forward' with no path: just the '@' punctuation and keyword tokens.
it('tokenizes solitary @forward correctly', function() {
const {tokens} = grammar.tokenizeLine('@forward');
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass', 'punctuation.definition.keyword.sass']});
expect(tokens[1]).toEqual({value: 'forward', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass']});
});
// '@forward <path>': keyword tokens plus the quoted module path.
it('tokenizes @forward with path correctly', function() {
const {tokens} = grammar.tokenizeLine("@forward 'module'");
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass', 'punctuation.definition.keyword.sass']});
expect(tokens[1]).toEqual({value: 'forward', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass']});
expect(tokens[3]).toEqual({value: "'", scopes: ['source.sass', 'meta.at-rule.forward.sass', 'string.quoted.single.sass', 'punctuation.definition.string.begin.sass']});
expect(tokens[4]).toEqual({value: 'module', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'string.quoted.single.sass']});
});
// '@forward ... as prefix*': the 'as' operator, the prefix identifier, and
// the trailing '*' wildcard as separate tokens.
it('tokenizes @forward with prefix correctly', function() {
const {tokens} = grammar.tokenizeLine("@forward 'module' as prefix*");
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass', 'punctuation.definition.keyword.sass']});
expect(tokens[1]).toEqual({value: 'forward', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass']});
expect(tokens[3]).toEqual({value: "'", scopes: ['source.sass', 'meta.at-rule.forward.sass', 'string.quoted.single.sass', 'punctuation.definition.string.begin.sass']});
expect(tokens[4]).toEqual({value: 'module', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'string.quoted.single.sass']});
expect(tokens[7]).toEqual({value: 'as', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.operator']});
expect(tokens[9]).toEqual({value: 'prefix', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'entity.other.attribute-name.module.sass']});
expect(tokens[10]).toEqual({value: '*', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'punctuation.definition.wildcard.sass']});
});
// '@forward ... hide <members>': the 'hide' operator, a mixin/function name
// (entity.name.function.sass), and a '$variable' usage pair.
it('tokenizes @forward with hide correctly', function() {
const {tokens} = grammar.tokenizeLine("@forward 'module' hide a-mixin $private-variable");
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass', 'punctuation.definition.keyword.sass']});
expect(tokens[1]).toEqual({value: 'forward', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass']});
expect(tokens[3]).toEqual({value: "'", scopes: ['source.sass', 'meta.at-rule.forward.sass', 'string.quoted.single.sass', 'punctuation.definition.string.begin.sass']});
expect(tokens[4]).toEqual({value: 'module', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'string.quoted.single.sass']});
expect(tokens[7]).toEqual({value: 'hide', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.operator']});
expect(tokens[9]).toEqual({value: 'a-mixin', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'entity.name.function.sass']});
expect(tokens[11]).toEqual({value: '$', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']});
expect(tokens[12]).toEqual({value: 'private-variable', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'meta.variable-usage.sass', 'variable.other.sass']});
});
it('tokenizes @forward with show correctly', function() {
const {tokens} = grammar.tokenizeLine("@forward 'module' show public-mixin $public-variable");
expect(tokens[0]).toEqual({value: '@', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass', 'punctuation.definition.keyword.sass']});
expect(tokens[1]).toEqual({value: 'forward', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.at-rule.forward.sass']});
expect(tokens[3]).toEqual({value: "'", scopes: ['source.sass', 'meta.at-rule.forward.sass', 'string.quoted.single.sass', 'punctuation.definition.string.begin.sass']});
expect(tokens[4]).toEqual({value: 'module', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'string.quoted.single.sass']});
expect(tokens[7]).toEqual({value: 'show', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'keyword.control.operator']});
expect(tokens[9]).toEqual({value: 'public-mixin', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'entity.name.function.sass']});
expect(tokens[11]).toEqual({value: '$', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']});
expect(tokens[12]).toEqual({value: 'public-variable', scopes: ['source.sass', 'meta.at-rule.forward.sass', 'meta.variable-usage.sass', 'variable.other.sass']});
});
});
});
// Specs for Sass operator tokenization: comparison/logical operators inside
// @if, control-flow words inside @for/@each, and namespaced module access.
describe('operators', function() {
  // Tokenize a single line and return its token array.
  const tokensOf = line => grammar.tokenizeLine(line).tokens;

  it('correctly tokenizes comparison and logical operators', function() {
    const comparisonScopes = ['source.sass', 'meta.at-rule.if.sass', 'keyword.operator.comparison.sass'];
    const logicalScopes = ['source.sass', 'meta.at-rule.if.sass', 'keyword.operator.logical.sass'];
    // Every binary comparison operator lands at token index 5 of "@if 1 <op> 1".
    for (const op of ['==', '!=', '>', '<', '>=', '<=']) {
      expect(tokensOf(`@if 1 ${op} 1`)[5]).toEqual({value: op, scopes: comparisonScopes});
    }
    // Binary logical operators joining two comparisons sit at token index 9.
    expect(tokensOf('@if 1 == 1 and 2 == 2')[9]).toEqual({value: 'and', scopes: logicalScopes});
    expect(tokensOf('@if 1 == 1 or 2 == 2')[9]).toEqual({value: 'or', scopes: logicalScopes});
    // Unary "not" directly follows "@if ", at token index 3.
    expect(tokensOf('@if not 1 == 1')[3]).toEqual({value: 'not', scopes: logicalScopes});
  });
  it('correctly tokenizes control operators', function() {
    const forScopes = ['source.sass', 'meta.at-rule.for.sass', 'keyword.operator.control.sass'];
    const throughTokens = tokensOf('@for $i from 1 through 2');
    expect(throughTokens[6]).toEqual({value: 'from', scopes: forScopes});
    expect(throughTokens[10]).toEqual({value: 'through', scopes: forScopes});
    const toTokens = tokensOf('@for $i from 1 to 2');
    expect(toTokens[10]).toEqual({value: 'to', scopes: forScopes});
    const eachTokens = tokensOf('@each $item in $list');
    expect(eachTokens[6]).toEqual({value: 'in', scopes: ['source.sass', 'meta.at-rule.each.sass', 'keyword.operator.control.sass']});
  });
  describe('module usage syntax', function() {
    it('correctly tokenizes module functions', function() {
      // Namespaced call: namespace, '.' access punctuation, function name, parens.
      const lines = grammar.tokenizeLines(`\
body
font-size: fonts.size(normal)\
`
      );
      expect(lines[1][4]).toEqual({value: 'fonts', scopes: ['source.sass', 'meta.property-value.sass', 'variable.sass']});
      expect(lines[1][5]).toEqual({value: '.', scopes: ['source.sass', 'meta.property-value.sass', 'punctuation.access.module.sass']});
      expect(lines[1][6]).toEqual({value: 'size', scopes: ['source.sass', 'meta.property-value.sass', 'support.function.misc.sass']});
      expect(lines[1][7]).toEqual({value: '(', scopes: ['source.sass', 'meta.property-value.sass', 'punctuation.section.function.sass']});
      expect(lines[1][9]).toEqual({value: ')', scopes: ['source.sass', 'meta.property-value.sass', 'punctuation.section.function.sass']});
    });
    it('correctly tokenizes module variables', function() {
      // Namespaced variable: namespace, '.', '$' punctuation, variable name.
      const lines = grammar.tokenizeLines(`\
body
font-size: fonts.$size\
`
      );
      expect(lines[1][4]).toEqual({value: 'fonts', scopes: ['source.sass', 'meta.property-value.sass', 'meta.variable-usage.sass', 'variable.sass']});
      expect(lines[1][5]).toEqual({value: '.', scopes: ['source.sass', 'meta.property-value.sass', 'meta.variable-usage.sass', 'punctuation.access.module.sass']});
      expect(lines[1][6]).toEqual({value: '$', scopes: ['source.sass', 'meta.property-value.sass', 'meta.variable-usage.sass', 'punctuation.definition.entity.css']});
      expect(lines[1][7]).toEqual({value: 'size', scopes: ['source.sass', 'meta.property-value.sass', 'meta.variable-usage.sass', 'variable.other.sass']});
    });
  });
});
});

View File

@ -1,62 +0,0 @@
# Specs for SassDoc documentation-comment highlighting inside SCSS comments.
describe 'SassDoc grammar', ->
grammar = null
beforeEach ->
# Activate language-sass, then fetch the SCSS grammar (SassDoc lives in /// comments).
waitsForPromise ->
atom.packages.activatePackage('language-sass')
runs ->
grammar = atom.grammars.grammarForScopeName('source.css.scss')
describe 'block tags', ->
# A bare tag like @deprecated: '@' is tag punctuation, the word is the tag name.
it 'tokenises simple tags', ->
{tokens} = grammar.tokenizeLine('/// @deprecated')
expect(tokens[0]).toEqual value: '///', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'punctuation.definition.comment.scss']
expect(tokens[1]).toEqual value: ' ', scopes: ['source.css.scss', 'comment.block.documentation.scss']
expect(tokens[2]).toEqual value: '@', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'storage.type.class.sassdoc', 'punctuation.definition.block.tag.sassdoc']
expect(tokens[3]).toEqual value: 'deprecated', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'storage.type.class.sassdoc']
# @param with "{type} $name - Description": braces + type, then the variable,
# then the free-text description stays in the plain doc-comment scope.
it 'tokenises @param tags with a description', ->
{tokens} = grammar.tokenizeLine('/// @param {type} $name - Description')
expect(tokens[0]).toEqual value: '///', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'punctuation.definition.comment.scss']
expect(tokens[2]).toEqual value: '@', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'storage.type.class.sassdoc', 'punctuation.definition.block.tag.sassdoc']
expect(tokens[3]).toEqual value: 'param', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'storage.type.class.sassdoc']
expect(tokens[5]).toEqual value: '{', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'entity.name.type.instance.sassdoc', 'punctuation.definition.bracket.curly.begin.sassdoc']
expect(tokens[6]).toEqual value: 'type', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'entity.name.type.instance.sassdoc']
expect(tokens[7]).toEqual value: '}', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'entity.name.type.instance.sassdoc', 'punctuation.definition.bracket.curly.end.sassdoc']
expect(tokens[9]).toEqual value: '$name', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'variable.other.sassdoc']
expect(tokens[10]).toEqual value: ' - Description', scopes: ['source.css.scss', 'comment.block.documentation.scss']
describe 'highlighted examples', ->
# Code following an "@example scss" tag is re-highlighted as embedded SCSS.
it 'highlights SCSS after an @example tag', ->
lines = grammar.tokenizeLines '''
///
/// @example scss - Description
/// .class{top:clamp(42,$min: 13)}
///
'''
expect(lines[1][0]).toEqual value: '///', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'punctuation.definition.comment.scss']
expect(lines[1][1]).toEqual value: ' ', scopes: ['source.css.scss', 'comment.block.documentation.scss']
expect(lines[1][2]).toEqual value: '@', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'storage.type.class.sassdoc', 'punctuation.definition.block.tag.sassdoc']
expect(lines[1][3]).toEqual value: 'example', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'storage.type.class.sassdoc']
expect(lines[1][4]).toEqual value: ' ', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc']
expect(lines[1][5]).toEqual value: 'scss', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'variable.other.sassdoc']
expect(lines[1][6]).toEqual value: ' ', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc']
expect(lines[1][7]).toEqual value: '- Description', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss']
expect(lines[2][0]).toEqual value: '/// ', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc']
expect(lines[2][1]).toEqual value: '.', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']
expect(lines[2][2]).toEqual value: 'class', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'entity.other.attribute-name.class.css']
expect(lines[2][3]).toEqual value: '{', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'punctuation.section.property-list.begin.bracket.curly.scss']
expect(lines[2][4]).toEqual value: 'top', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-name.scss']
expect(lines[2][5]).toEqual value: ':', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'punctuation.separator.key-value.scss']
expect(lines[2][6]).toEqual value: 'clamp', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss', 'support.function.misc.scss']
expect(lines[2][7]).toEqual value: '(', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss', 'punctuation.section.function.scss']
expect(lines[2][8]).toEqual value: '42', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss', 'variable.parameter.url.scss']
expect(lines[2][9]).toEqual value: ',', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss', 'punctuation.separator.delimiter.scss']
expect(lines[2][10]).toEqual value: '$min', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss', 'variable.scss']
expect(lines[2][11]).toEqual value: ':', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss', 'variable.parameter.url.scss']
expect(lines[2][12]).toEqual value: ' ', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss']
expect(lines[2][13]).toEqual value: '13', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss', 'variable.parameter.url.scss']
expect(lines[2][14]).toEqual value: ')', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss', 'punctuation.section.function.scss']
expect(lines[2][15]).toEqual value: '}', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'punctuation.section.property-list.end.bracket.curly.scss']

View File

@ -0,0 +1,67 @@
// Specs for SassDoc documentation-comment highlighting inside SCSS comments.
describe('SassDoc grammar', function() {
  let grammar = null;
  beforeEach(function() {
    // Activate language-sass, then fetch the SCSS grammar (SassDoc lives in /// comments).
    waitsForPromise(() => atom.packages.activatePackage('language-sass'));
    runs(() => grammar = atom.grammars.grammarForScopeName('source.css.scss'));
  });
  describe('block tags', function() {
    // A bare tag like @deprecated: '@' is tag punctuation, the word is the tag name.
    it('tokenises simple tags', function() {
      const {tokens} = grammar.tokenizeLine('/// @deprecated');
      expect(tokens[0]).toEqual({value: '///', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'punctuation.definition.comment.scss']});
      expect(tokens[1]).toEqual({value: ' ', scopes: ['source.css.scss', 'comment.block.documentation.scss']});
      expect(tokens[2]).toEqual({value: '@', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'storage.type.class.sassdoc', 'punctuation.definition.block.tag.sassdoc']});
      expect(tokens[3]).toEqual({value: 'deprecated', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'storage.type.class.sassdoc']});
    });
    // @param with "{type} $name - Description": braces + type, then the variable,
    // then the free-text description stays in the plain doc-comment scope.
    it('tokenises @param tags with a description', function() {
      const {tokens} = grammar.tokenizeLine('/// @param {type} $name - Description');
      expect(tokens[0]).toEqual({value: '///', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'punctuation.definition.comment.scss']});
      expect(tokens[2]).toEqual({value: '@', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'storage.type.class.sassdoc', 'punctuation.definition.block.tag.sassdoc']});
      expect(tokens[3]).toEqual({value: 'param', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'storage.type.class.sassdoc']});
      expect(tokens[5]).toEqual({value: '{', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'entity.name.type.instance.sassdoc', 'punctuation.definition.bracket.curly.begin.sassdoc']});
      expect(tokens[6]).toEqual({value: 'type', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'entity.name.type.instance.sassdoc']});
      expect(tokens[7]).toEqual({value: '}', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'entity.name.type.instance.sassdoc', 'punctuation.definition.bracket.curly.end.sassdoc']});
      expect(tokens[9]).toEqual({value: '$name', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'variable.other.sassdoc']});
      expect(tokens[10]).toEqual({value: ' - Description', scopes: ['source.css.scss', 'comment.block.documentation.scss']});
    });
  });
  // Rewritten from the decaffeinate artifact `describe(..., () => it(...))`
  // (an expression-bodied describe returning the `it` object) to the
  // conventional block form; the registered spec is unchanged.
  describe('highlighted examples', function() {
    // Code following an "@example scss" tag is re-highlighted as embedded SCSS.
    it('highlights SCSS after an @example tag', function() {
      const lines = grammar.tokenizeLines(`\
///
/// @example scss - Description
/// .class{top:clamp(42,$min: 13)}
///\
`
      );
      expect(lines[1][0]).toEqual({value: '///', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'punctuation.definition.comment.scss']});
      expect(lines[1][1]).toEqual({value: ' ', scopes: ['source.css.scss', 'comment.block.documentation.scss']});
      expect(lines[1][2]).toEqual({value: '@', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'storage.type.class.sassdoc', 'punctuation.definition.block.tag.sassdoc']});
      expect(lines[1][3]).toEqual({value: 'example', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'storage.type.class.sassdoc']});
      expect(lines[1][4]).toEqual({value: ' ', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc']});
      expect(lines[1][5]).toEqual({value: 'scss', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'variable.other.sassdoc']});
      expect(lines[1][6]).toEqual({value: ' ', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc']});
      expect(lines[1][7]).toEqual({value: '- Description', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss']});
      expect(lines[2][0]).toEqual({value: '/// ', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc']});
      expect(lines[2][1]).toEqual({value: '.', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']});
      expect(lines[2][2]).toEqual({value: 'class', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'entity.other.attribute-name.class.css']});
      expect(lines[2][3]).toEqual({value: '{', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'punctuation.section.property-list.begin.bracket.curly.scss']});
      expect(lines[2][4]).toEqual({value: 'top', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-name.scss']});
      expect(lines[2][5]).toEqual({value: ':', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'punctuation.separator.key-value.scss']});
      expect(lines[2][6]).toEqual({value: 'clamp', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss', 'support.function.misc.scss']});
      expect(lines[2][7]).toEqual({value: '(', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss', 'punctuation.section.function.scss']});
      expect(lines[2][8]).toEqual({value: '42', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss', 'variable.parameter.url.scss']});
      expect(lines[2][9]).toEqual({value: ',', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss', 'punctuation.separator.delimiter.scss']});
      expect(lines[2][10]).toEqual({value: '$min', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss', 'variable.scss']});
      expect(lines[2][11]).toEqual({value: ':', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss', 'variable.parameter.url.scss']});
      expect(lines[2][12]).toEqual({value: ' ', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss']});
      expect(lines[2][13]).toEqual({value: '13', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss', 'variable.parameter.url.scss']});
      expect(lines[2][14]).toEqual({value: ')', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'meta.property-value.scss', 'punctuation.section.function.scss']});
      expect(lines[2][15]).toEqual({value: '}', scopes: ['source.css.scss', 'comment.block.documentation.scss', 'meta.example.css.scss.sassdoc', 'source.embedded.css.scss', 'meta.property-list.scss', 'punctuation.section.property-list.end.bracket.curly.scss']});
    });
  });
});

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,76 +0,0 @@
describe "Shell session grammar", ->
grammar = null
beforeEach ->
atom.config.set 'core.useTreeSitterParsers', false
waitsForPromise ->
atom.packages.activatePackage("language-shellscript")
runs ->
grammar = atom.grammars.grammarForScopeName("text.shell-session")
it "parses the grammar", ->
expect(grammar).toBeDefined()
expect(grammar.scopeName).toBe "text.shell-session"
prompts = [">", "$", "#", "%", "", ""]
it "tokenizes prompts", ->
for delim in prompts
{tokens} = grammar.tokenizeLine(delim + ' echo $FOO')
expect(tokens[0]).toEqual value: delim, scopes: ['text.shell-session', 'punctuation.separator.prompt.shell-session']
expect(tokens[1]).toEqual value: ' ', scopes: ['text.shell-session']
expect(tokens[2]).toEqual value: 'echo', scopes: ['text.shell-session', 'source.shell', 'support.function.builtin.shell']
it "tokenises prompts with Greek characters", ->
sigils = ["λ", "Λ", "Δ", "Σ", "Ω"]
for sigil in sigils
lines = grammar.tokenizeLines """
#{sigil} echo #{sigil}μμ
O#{sigil}tput Ω
"""
expect(lines[0][0]).toEqual value: sigil, scopes: ['text.shell-session', 'punctuation.separator.prompt.shell-session']
expect(lines[0][2]).toEqual value: 'echo', scopes: ['text.shell-session', 'source.shell', 'support.function.builtin.shell']
expect(lines[0][3]).toEqual value: " #{sigil}μμ", scopes: ['text.shell-session', 'source.shell']
expect(lines[1][0]).toEqual value: "O#{sigil}tput Ω", scopes: ['text.shell-session', 'meta.output.shell-session']
it "does not tokenize prompts with indents", ->
for delim in prompts
{tokens} = grammar.tokenizeLine(' ' + delim + ' echo $FOO')
expect(tokens[0]).toEqual value: ' ' + delim + ' echo $FOO', scopes: ['text.shell-session', 'meta.output.shell-session']
it "tokenizes prompts with prefixes", ->
{tokens} = grammar.tokenizeLine('user@machine $ echo $FOO')
expect(tokens[0]).toEqual value: 'user@machine', scopes: ['text.shell-session', 'entity.other.prompt-prefix.shell-session']
expect(tokens[1]).toEqual value: ' ', scopes: ['text.shell-session']
expect(tokens[2]).toEqual value: '$', scopes: ['text.shell-session', 'punctuation.separator.prompt.shell-session']
expect(tokens[3]).toEqual value: ' ', scopes: ['text.shell-session']
expect(tokens[4]).toEqual value: 'echo', scopes: ['text.shell-session', 'source.shell', 'support.function.builtin.shell']
it "tokenizes prompts with prefixes and a leading parenthetical", ->
{tokens} = grammar.tokenizeLine('(venv) machine:pwd user$ echo $FOO')
expect(tokens[0]).toEqual value: '(venv) machine:pwd user', scopes: ['text.shell-session', 'entity.other.prompt-prefix.shell-session']
expect(tokens[1]).toEqual value: '$', scopes: ['text.shell-session', 'punctuation.separator.prompt.shell-session']
expect(tokens[2]).toEqual value: ' ', scopes: ['text.shell-session']
expect(tokens[3]).toEqual value: 'echo', scopes: ['text.shell-session', 'source.shell', 'support.function.builtin.shell']
it "tokenizes prompts with prefixes with brackets", ->
{tokens} = grammar.tokenizeLine('[user@machine pwd]$ echo $FOO')
expect(tokens[0]).toEqual value: '[user@machine pwd]', scopes: ['text.shell-session', 'entity.other.prompt-prefix.shell-session']
expect(tokens[1]).toEqual value: '$', scopes: ['text.shell-session', 'punctuation.separator.prompt.shell-session']
expect(tokens[2]).toEqual value: ' ', scopes: ['text.shell-session']
expect(tokens[3]).toEqual value: 'echo', scopes: ['text.shell-session', 'source.shell', 'support.function.builtin.shell']
it "tokenizes shell output", ->
tokens = grammar.tokenizeLines """
$ echo $FOO
foo
"""
expect(tokens[1][0]).toEqual value: 'foo', scopes: ['text.shell-session', 'meta.output.shell-session']

View File

@ -0,0 +1,98 @@
describe("Shell session grammar", () => {
let grammar = null;
beforeEach(() => {
atom.config.set('core.useTreeSitterParsers', false);
waitsForPromise(() => atom.packages.activatePackage("language-shellscript"));
runs(() => grammar = atom.grammars.grammarForScopeName("text.shell-session"));
});
it("parses the grammar", () => {
expect(grammar).toBeDefined();
expect(grammar.scopeName).toBe("text.shell-session");
});
const prompts = [">", "$", "#", "%", "", "➜"];
it("tokenizes prompts", () => (() => {
const result = [];
for (let delim of prompts) {
const {tokens} = grammar.tokenizeLine(delim + ' echo $FOO');
expect(tokens[0]).toEqual({value: delim, scopes: ['text.shell-session', 'punctuation.separator.prompt.shell-session']});
expect(tokens[1]).toEqual({value: ' ', scopes: ['text.shell-session']});
result.push(expect(tokens[2]).toEqual({value: 'echo', scopes: ['text.shell-session', 'source.shell', 'support.function.builtin.shell']}));
}
return result;
})());
it("tokenises prompts with Greek characters", () => {
const sigils = ["λ", "Λ", "Δ", "Σ", "Ω"];
return (() => {
const result = [];
for (let sigil of sigils) {
const lines = grammar.tokenizeLines(`\
${sigil} echo ${sigil}μμ
O${sigil}tput Ω\
`
);
expect(lines[0][0]).toEqual({value: sigil, scopes: ['text.shell-session', 'punctuation.separator.prompt.shell-session']});
expect(lines[0][2]).toEqual({value: 'echo', scopes: ['text.shell-session', 'source.shell', 'support.function.builtin.shell']});
expect(lines[0][3]).toEqual({value: ` ${sigil}μμ`, scopes: ['text.shell-session', 'source.shell']});
result.push(expect(lines[1][0]).toEqual({value: `O${sigil}tput Ω`, scopes: ['text.shell-session', 'meta.output.shell-session']}));
}
return result;
})();
});
it("does not tokenize prompts with indents", () => (() => {
const result = [];
for (let delim of prompts) {
const {tokens} = grammar.tokenizeLine(' ' + delim + ' echo $FOO');
result.push(expect(tokens[0]).toEqual({value: ' ' + delim + ' echo $FOO', scopes: ['text.shell-session', 'meta.output.shell-session']}));
}
return result;
})());
it("tokenizes prompts with prefixes", () => {
const {tokens} = grammar.tokenizeLine('user@machine $ echo $FOO');
expect(tokens[0]).toEqual({value: 'user@machine', scopes: ['text.shell-session', 'entity.other.prompt-prefix.shell-session']});
expect(tokens[1]).toEqual({value: ' ', scopes: ['text.shell-session']});
expect(tokens[2]).toEqual({value: '$', scopes: ['text.shell-session', 'punctuation.separator.prompt.shell-session']});
expect(tokens[3]).toEqual({value: ' ', scopes: ['text.shell-session']});
expect(tokens[4]).toEqual({value: 'echo', scopes: ['text.shell-session', 'source.shell', 'support.function.builtin.shell']});
});
it("tokenizes prompts with prefixes and a leading parenthetical", () => {
const {tokens} = grammar.tokenizeLine('(venv) machine:pwd user$ echo $FOO');
expect(tokens[0]).toEqual({value: '(venv) machine:pwd user', scopes: ['text.shell-session', 'entity.other.prompt-prefix.shell-session']});
expect(tokens[1]).toEqual({value: '$', scopes: ['text.shell-session', 'punctuation.separator.prompt.shell-session']});
expect(tokens[2]).toEqual({value: ' ', scopes: ['text.shell-session']});
expect(tokens[3]).toEqual({value: 'echo', scopes: ['text.shell-session', 'source.shell', 'support.function.builtin.shell']});
});
it("tokenizes prompts with prefixes with brackets", () => {
const {tokens} = grammar.tokenizeLine('[user@machine pwd]$ echo $FOO');
expect(tokens[0]).toEqual({value: '[user@machine pwd]', scopes: ['text.shell-session', 'entity.other.prompt-prefix.shell-session']});
expect(tokens[1]).toEqual({value: '$', scopes: ['text.shell-session', 'punctuation.separator.prompt.shell-session']});
expect(tokens[2]).toEqual({value: ' ', scopes: ['text.shell-session']});
expect(tokens[3]).toEqual({value: 'echo', scopes: ['text.shell-session', 'source.shell', 'support.function.builtin.shell']});
});
it("tokenizes shell output", () => {
const tokens = grammar.tokenizeLines(`\
$ echo $FOO
foo\
`
);
expect(tokens[1][0]).toEqual({value: 'foo', scopes: ['text.shell-session', 'meta.output.shell-session']});
});
});

View File

@ -1,459 +0,0 @@
# Lazily-required TextEditor constructor; only needed on the legacy fallback path.
TextEditor = null

# Build a TextEditor through the workspace API when available (newer Atom),
# otherwise construct one directly from the atom module.
# NOTE(review): indentation was stripped by the diff renderer; restored here —
# without it the function body is empty and the conditional dangles at top level.
buildTextEditor = (params) ->
  if atom.workspace.buildTextEditor?
    atom.workspace.buildTextEditor(params)
  else
    TextEditor ?= require('atom').TextEditor
    new TextEditor(params)
# Specs for the TextMate shell grammar. Indentation restored (stripped by the
# diff renderer); CoffeeScript nesting is significant.
describe "Shell script grammar", ->
  grammar = null

  beforeEach ->
    # These specs target the TextMate grammar, so disable tree-sitter parsing.
    atom.config.set 'core.useTreeSitterParsers', false

    waitsForPromise ->
      atom.packages.activatePackage("language-shellscript")

    runs ->
      grammar = atom.grammars.grammarForScopeName("source.shell")
it "parses the grammar", ->
  expect(grammar).toBeDefined()
  expect(grammar.scopeName).toBe "source.shell"

it "tokenizes strings inside variable constructs", ->
  {tokens} = grammar.tokenizeLine("${'root'}")

  expect(tokens[0]).toEqual value: '${', scopes: ['source.shell', 'variable.other.bracket.shell', 'punctuation.definition.variable.shell']
  expect(tokens[1]).toEqual value: "'", scopes: ['source.shell', 'variable.other.bracket.shell', 'string.quoted.single.shell', 'punctuation.definition.string.begin.shell']
  expect(tokens[2]).toEqual value: "root", scopes: ['source.shell', 'variable.other.bracket.shell', 'string.quoted.single.shell']
  expect(tokens[3]).toEqual value: "'", scopes: ['source.shell', 'variable.other.bracket.shell', 'string.quoted.single.shell', 'punctuation.definition.string.end.shell']
  expect(tokens[4]).toEqual value: '}', scopes: ['source.shell', 'variable.other.bracket.shell', 'punctuation.definition.variable.shell']

it "tokenizes if correctly when it's a parameter", ->
  # "if=" here is a dd argument, not the keyword.
  {tokens} = grammar.tokenizeLine('dd if=/dev/random of=/dev/null')

  expect(tokens[0]).toEqual value: 'dd if=/dev/random of=/dev/null', scopes: ['source.shell']
it "tokenizes if as a keyword", ->
  # Both single- and double-bracket test expressions should scope identically.
  brackets =
    "[": "]"
    "[[": "]]"

  for openingBracket, closingBracket of brackets
    {tokens} = grammar.tokenizeLine('if ' + openingBracket + ' -f /var/log/messages ' + closingBracket)
    expect(tokens[0]).toEqual value: 'if', scopes: ['source.shell', 'meta.scope.if-block.shell', 'keyword.control.shell']
    expect(tokens[2]).toEqual value: openingBracket, scopes: ['source.shell', 'meta.scope.if-block.shell', 'meta.scope.logical-expression.shell', 'punctuation.definition.logical-expression.shell']
    expect(tokens[4]).toEqual value: '-f', scopes: ['source.shell', 'meta.scope.if-block.shell', 'meta.scope.logical-expression.shell', 'keyword.operator.logical.shell']
    expect(tokens[5]).toEqual value: ' /var/log/messages ', scopes: ['source.shell', 'meta.scope.if-block.shell', 'meta.scope.logical-expression.shell']
    expect(tokens[6]).toEqual value: closingBracket, scopes: ['source.shell', 'meta.scope.if-block.shell', 'meta.scope.logical-expression.shell', 'punctuation.definition.logical-expression.shell']

it "tokenizes for...in loops", ->
  {tokens} = grammar.tokenizeLine('for variable in file do do-something-done done')
  expect(tokens[0]).toEqual value: 'for', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'keyword.control.shell']
  expect(tokens[2]).toEqual value: 'variable', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'variable.other.loop.shell']
  expect(tokens[4]).toEqual value: 'in', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'keyword.control.shell']
  expect(tokens[5]).toEqual value: ' file ', scopes: ['source.shell', 'meta.scope.for-in-loop.shell']
  expect(tokens[6]).toEqual value: 'do', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'keyword.control.shell']
  expect(tokens[7]).toEqual value: ' do-something-done ', scopes: ['source.shell', 'meta.scope.for-in-loop.shell']
  expect(tokens[8]).toEqual value: 'done', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'keyword.control.shell']

  # Quoted loop variable and quoted array expansion in the list position.
  {tokens} = grammar.tokenizeLine('for "variable" in "${list[@]}" do something done')
  expect(tokens[0]).toEqual value: 'for', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'keyword.control.shell']
  expect(tokens[2]).toEqual value: '"', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'variable.other.loop.shell', 'string.quoted.double.shell', 'punctuation.definition.string.begin.shell']
  expect(tokens[3]).toEqual value: 'variable', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'variable.other.loop.shell', 'string.quoted.double.shell']
  expect(tokens[4]).toEqual value: '"', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'variable.other.loop.shell', 'string.quoted.double.shell', 'punctuation.definition.string.end.shell']
  expect(tokens[6]).toEqual value: 'in', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'keyword.control.shell']
  expect(tokens[8]).toEqual value: '"', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'string.quoted.double.shell', 'punctuation.definition.string.begin.shell']
  expect(tokens[9]).toEqual value: '${', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'string.quoted.double.shell', 'variable.other.bracket.shell', 'punctuation.definition.variable.shell']
  expect(tokens[10]).toEqual value: 'list', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'string.quoted.double.shell', 'variable.other.bracket.shell']
  expect(tokens[11]).toEqual value: '[', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'string.quoted.double.shell', 'variable.other.bracket.shell', 'punctuation.section.array.shell']
  expect(tokens[12]).toEqual value: '@', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'string.quoted.double.shell', 'variable.other.bracket.shell']
  expect(tokens[13]).toEqual value: ']', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'string.quoted.double.shell', 'variable.other.bracket.shell', 'punctuation.section.array.shell']
  expect(tokens[14]).toEqual value: '}', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'string.quoted.double.shell', 'variable.other.bracket.shell', 'punctuation.definition.variable.shell']
  expect(tokens[15]).toEqual value: '"', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'string.quoted.double.shell', 'punctuation.definition.string.end.shell']
  expect(tokens[17]).toEqual value: 'do', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'keyword.control.shell']
  expect(tokens[18]).toEqual value: ' something ', scopes: ['source.shell', 'meta.scope.for-in-loop.shell']
  expect(tokens[19]).toEqual value: 'done', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'keyword.control.shell']

  # "in" inside a trailing comment must scope as comment text, not keyword.
  {tokens} = grammar.tokenizeLine('for variable in something do # in')
  expect(tokens[4]).toEqual value: 'in', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'keyword.control.shell']
  expect(tokens[8]).toEqual value: '#', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'comment.line.number-sign.shell', 'punctuation.definition.comment.shell']
  expect(tokens[9]).toEqual value: ' in', scopes: ['source.shell', 'meta.scope.for-in-loop.shell', 'comment.line.number-sign.shell']
it "doesn't tokenize keywords when they're part of a phrase", ->
  {tokens} = grammar.tokenizeLine('grep --ignore-case "something"')
  expect(tokens[0]).toEqual value: 'grep --ignore-case ', scopes: ['source.shell']
  expect(tokens[1]).toEqual value: '"', scopes: ['source.shell', 'string.quoted.double.shell', 'punctuation.definition.string.begin.shell']

  # Each of these embeds a keyword ("if", "for", "done", …) inside a larger
  # word or phrase and must tokenize as a single plain-shell run.
  strings = [
    'iffy'
    'enable-something'
    'there.for'
    'be+done'
    'little,while'
    'rest@until'
    'lets:select words'
    'in🚀case of stuff'
    'the#fi%nal countdown'
    'time⏰out'
  ]

  for string in strings
    {tokens} = grammar.tokenizeLine(string)
    expect(tokens[0]).toEqual value: string, scopes: ['source.shell']

  {tokens} = grammar.tokenizeLine('this/function ()')
  expect(tokens[0]).toEqual value: 'this/function', scopes: ['source.shell', 'meta.function.shell', 'entity.name.function.shell']
  expect(tokens[2]).toEqual value: '()', scopes: ['source.shell', 'meta.function.shell', 'punctuation.definition.arguments.shell']

  {tokens} = grammar.tokenizeLine('and,for (( this ))')
  expect(tokens[0]).toEqual value: 'and,for ', scopes: ['source.shell']
  expect(tokens[1]).toEqual value: '((', scopes: ['source.shell', 'string.other.math.shell', 'punctuation.definition.string.begin.shell']
it "tokenizes herestrings", ->
  delimsByScope =
    "string.quoted.double.shell": '"'
    "string.quoted.single.shell": "'"

  # Herestring immediately after the <<< operator.
  for scope, delim of delimsByScope
    tokens = grammar.tokenizeLines """
      $cmd <<<#{delim}
      lorem ipsum#{delim}
    """
    expect(tokens[0][0]).toEqual value: '$', scopes: ['source.shell', 'variable.other.normal.shell', 'punctuation.definition.variable.shell']
    expect(tokens[0][1]).toEqual value: 'cmd', scopes: ['source.shell', 'variable.other.normal.shell']
    expect(tokens[0][3]).toEqual value: '<<<', scopes: ['source.shell', 'meta.herestring.shell', 'keyword.operator.herestring.shell']
    expect(tokens[0][4]).toEqual value: delim, scopes: ['source.shell', 'meta.herestring.shell', scope, 'punctuation.definition.string.begin.shell']
    expect(tokens[1][0]).toEqual value: 'lorem ipsum', scopes: ['source.shell', 'meta.herestring.shell', scope]
    expect(tokens[1][1]).toEqual value: delim, scopes: ['source.shell', 'meta.herestring.shell', scope, 'punctuation.definition.string.end.shell']

  # Herestring with a space after the operator.
  for scope, delim of delimsByScope
    tokens = grammar.tokenizeLines """
      $cmd <<< #{delim}
      lorem ipsum#{delim}
    """
    expect(tokens[0][0]).toEqual value: '$', scopes: ['source.shell', 'variable.other.normal.shell', 'punctuation.definition.variable.shell']
    expect(tokens[0][1]).toEqual value: 'cmd', scopes: ['source.shell', 'variable.other.normal.shell']
    expect(tokens[0][3]).toEqual value: '<<<', scopes: ['source.shell', 'meta.herestring.shell', 'keyword.operator.herestring.shell']
    expect(tokens[0][4]).toEqual value: ' ', scopes: ['source.shell', 'meta.herestring.shell']
    expect(tokens[0][5]).toEqual value: delim, scopes: ['source.shell', 'meta.herestring.shell', scope, 'punctuation.definition.string.begin.shell']
    expect(tokens[1][0]).toEqual value: 'lorem ipsum', scopes: ['source.shell', 'meta.herestring.shell', scope]
    expect(tokens[1][1]).toEqual value: delim, scopes: ['source.shell', 'meta.herestring.shell', scope, 'punctuation.definition.string.end.shell']

  # Unquoted herestring word: a variable…
  {tokens} = grammar.tokenizeLine '$cmd = something <<< $COUNTRIES'
  expect(tokens[3]).toEqual value: '<<<', scopes: ['source.shell', 'meta.herestring.shell', 'keyword.operator.herestring.shell']
  expect(tokens[4]).toEqual value: ' ', scopes: ['source.shell', 'meta.herestring.shell']
  expect(tokens[5]).toEqual value: '$', scopes: ['source.shell', 'meta.herestring.shell', 'string.unquoted.herestring.shell', 'variable.other.normal.shell', 'punctuation.definition.variable.shell']
  expect(tokens[6]).toEqual value: 'COUNTRIES', scopes: ['source.shell', 'meta.herestring.shell', 'string.unquoted.herestring.shell', 'variable.other.normal.shell']

  # …or a bare word; the herestring ends at the first space.
  {tokens} = grammar.tokenizeLine '$cmd = something <<< TEST 1 2'
  expect(tokens[3]).toEqual value: '<<<', scopes: ['source.shell', 'meta.herestring.shell', 'keyword.operator.herestring.shell']
  expect(tokens[4]).toEqual value: ' ', scopes: ['source.shell', 'meta.herestring.shell']
  expect(tokens[5]).toEqual value: 'TEST', scopes: ['source.shell', 'meta.herestring.shell', 'string.unquoted.herestring.shell']
  expect(tokens[6]).toEqual value: ' 1 2', scopes: ['source.shell']

  # Herestring nested inside command substitution inside a double-quoted string.
  {tokens} = grammar.tokenizeLine '$cmd = "$(3 / x <<< $WORD)"'
  expect(tokens[6]).toEqual value: '<<<', scopes: ['source.shell', 'string.quoted.double.shell', 'string.interpolated.dollar.shell', 'meta.herestring.shell', 'keyword.operator.herestring.shell']
  expect(tokens[8]).toEqual value: '$', scopes: ['source.shell', 'string.quoted.double.shell', 'string.interpolated.dollar.shell', 'meta.herestring.shell', 'string.unquoted.herestring.shell', 'variable.other.normal.shell', 'punctuation.definition.variable.shell']
  expect(tokens[9]).toEqual value: 'WORD', scopes: ['source.shell', 'string.quoted.double.shell', 'string.interpolated.dollar.shell', 'meta.herestring.shell', 'string.unquoted.herestring.shell', 'variable.other.normal.shell']
  expect(tokens[10]).toEqual value: ')', scopes: ['source.shell', 'string.quoted.double.shell', 'string.interpolated.dollar.shell', 'punctuation.definition.string.end.shell']
it "tokenizes heredocs", ->
  # Delimiters that select an embedded-language scope for the heredoc body.
  delimsByScope =
    "ruby": "RUBY"
    "python": "PYTHON"
    "applescript": "APPLESCRIPT"
    "shell": "SHELL"

  for scope, delim of delimsByScope
    # <<DELIM
    tokens = grammar.tokenizeLines """
      <<#{delim}
      stuff
      #{delim}
    """
    expect(tokens[0][0]).toEqual value: '<<', scopes: ['source.shell', 'string.unquoted.heredoc.' + scope + '.shell', 'keyword.operator.heredoc.shell']
    expect(tokens[0][1]).toEqual value: delim, scopes: ['source.shell', 'string.unquoted.heredoc.' + scope + '.shell', 'keyword.control.heredoc-token.shell']
    expect(tokens[1][0]).toEqual value: 'stuff', scopes: ['source.shell', 'string.unquoted.heredoc.' + scope + '.shell', 'source.' + scope + '.embedded.shell']
    expect(tokens[2][0]).toEqual value: delim, scopes: ['source.shell', 'string.unquoted.heredoc.' + scope + '.shell', 'keyword.control.heredoc-token.shell']

    # << DELIM (space after the operator)
    tokens = grammar.tokenizeLines """
      << #{delim}
      stuff
      #{delim}
    """
    expect(tokens[0][0]).toEqual value: '<<', scopes: ['source.shell', 'string.unquoted.heredoc.' + scope + '.shell', 'keyword.operator.heredoc.shell']
    expect(tokens[0][1]).toEqual value: ' ', scopes: ['source.shell', 'string.unquoted.heredoc.' + scope + '.shell']
    expect(tokens[0][2]).toEqual value: delim, scopes: ['source.shell', 'string.unquoted.heredoc.' + scope + '.shell', 'keyword.control.heredoc-token.shell']
    expect(tokens[1][0]).toEqual value: 'stuff', scopes: ['source.shell', 'string.unquoted.heredoc.' + scope + '.shell', 'source.' + scope + '.embedded.shell']
    expect(tokens[2][0]).toEqual value: delim, scopes: ['source.shell', 'string.unquoted.heredoc.' + scope + '.shell', 'keyword.control.heredoc-token.shell']

    # <<-DELIM (indent-stripping form)
    tokens = grammar.tokenizeLines """
      <<-#{delim}
      stuff
      #{delim}
    """
    expect(tokens[0][0]).toEqual value: '<<', scopes: ['source.shell', 'string.unquoted.heredoc.no-indent.' + scope + '.shell', 'keyword.operator.heredoc.shell']
    expect(tokens[0][2]).toEqual value: delim, scopes: ['source.shell', 'string.unquoted.heredoc.no-indent.' + scope + '.shell', 'keyword.control.heredoc-token.shell']
    expect(tokens[1][0]).toEqual value: 'stuff', scopes: ['source.shell', 'string.unquoted.heredoc.no-indent.' + scope + '.shell', 'source.' + scope + '.embedded.shell']
    expect(tokens[2][0]).toEqual value: delim, scopes: ['source.shell', 'string.unquoted.heredoc.no-indent.' + scope + '.shell', 'keyword.control.heredoc-token.shell']

    # <<- DELIM
    tokens = grammar.tokenizeLines """
      <<- #{delim}
      stuff
      #{delim}
    """
    expect(tokens[0][0]).toEqual value: '<<', scopes: ['source.shell', 'string.unquoted.heredoc.no-indent.' + scope + '.shell', 'keyword.operator.heredoc.shell']
    expect(tokens[0][1]).toEqual value: '- ', scopes: ['source.shell', 'string.unquoted.heredoc.no-indent.' + scope + '.shell']
    expect(tokens[0][2]).toEqual value: delim, scopes: ['source.shell', 'string.unquoted.heredoc.no-indent.' + scope + '.shell', 'keyword.control.heredoc-token.shell']
    expect(tokens[1][0]).toEqual value: 'stuff', scopes: ['source.shell', 'string.unquoted.heredoc.no-indent.' + scope + '.shell', 'source.' + scope + '.embedded.shell']
    expect(tokens[2][0]).toEqual value: delim, scopes: ['source.shell', 'string.unquoted.heredoc.no-indent.' + scope + '.shell', 'keyword.control.heredoc-token.shell']

  # Delimiters with no associated embedded language.
  delims = [
    "RANDOMTHING"
    "RUBY@1.8"
    "END-INPUT"
  ]

  for delim in delims
    tokens = grammar.tokenizeLines """
      <<#{delim}
      stuff
      #{delim}
    """
    expect(tokens[0][0]).toEqual value: '<<', scopes: ['source.shell', 'string.unquoted.heredoc.expanded.shell', 'keyword.operator.heredoc.shell']
    expect(tokens[0][1]).toEqual value: delim, scopes: ['source.shell', 'string.unquoted.heredoc.expanded.shell', 'keyword.control.heredoc-token.shell']
    expect(tokens[1][0]).toEqual value: 'stuff', scopes: ['source.shell', 'string.unquoted.heredoc.expanded.shell']
    expect(tokens[2][0]).toEqual value: delim, scopes: ['source.shell', 'string.unquoted.heredoc.expanded.shell', 'keyword.control.heredoc-token.shell']

  # Single-quoted delimiter suppresses expansion.
  for delim in delims
    tokens = grammar.tokenizeLines """
      << '#{delim}'
      stuff
      #{delim}
    """
    expect(tokens[0][0]).toEqual value: '<<', scopes: ['source.shell', 'string.unquoted.heredoc.shell', 'keyword.operator.heredoc.shell']
    expect(tokens[0][2]).toEqual value: delim, scopes: ['source.shell', 'string.unquoted.heredoc.shell', 'keyword.control.heredoc-token.shell']
    expect(tokens[1][0]).toEqual value: 'stuff', scopes: ['source.shell', 'string.unquoted.heredoc.shell']
    expect(tokens[2][0]).toEqual value: delim, scopes: ['source.shell', 'string.unquoted.heredoc.shell', 'keyword.control.heredoc-token.shell']
it "tokenizes shebangs", ->
  {tokens} = grammar.tokenizeLine('#!/bin/sh')
  expect(tokens[0]).toEqual value: '#!', scopes: ['source.shell', 'comment.line.number-sign.shebang.shell', 'punctuation.definition.comment.shebang.shell']
  expect(tokens[1]).toEqual value: '/bin/sh', scopes: ['source.shell', 'comment.line.number-sign.shebang.shell']

it "tokenizes comments", ->
  {tokens} = grammar.tokenizeLine('#comment')
  expect(tokens[0]).toEqual value: '#', scopes: ['source.shell', 'comment.line.number-sign.shell', 'punctuation.definition.comment.shell']
  expect(tokens[1]).toEqual value: 'comment', scopes: ['source.shell', 'comment.line.number-sign.shell']

it "tokenizes comments in interpolated strings", ->
  {tokens} = grammar.tokenizeLine('`#comment`')
  expect(tokens[1]).toEqual value: '#', scopes: ['source.shell', 'string.interpolated.backtick.shell', 'comment.line.number-sign.shell', 'punctuation.definition.comment.shell']
  expect(tokens[3]).toEqual value: '`', scopes: ['source.shell', 'string.interpolated.backtick.shell', 'punctuation.definition.string.end.shell']

it "does not tokenize -# in argument lists as a comment", ->
  # e.g. curl's progress-bar flag.
  {tokens} = grammar.tokenizeLine('curl -#')
  expect(tokens[0]).toEqual value: 'curl -#', scopes: ['source.shell']

it "tokenizes nested variable expansions", ->
  {tokens} = grammar.tokenizeLine('${${C}}')
  expect(tokens[0]).toEqual value: '${', scopes: ['source.shell', 'variable.other.bracket.shell', 'punctuation.definition.variable.shell']
  expect(tokens[1]).toEqual value: '${', scopes: ['source.shell', 'variable.other.bracket.shell', 'variable.other.bracket.shell', 'punctuation.definition.variable.shell']
  expect(tokens[2]).toEqual value: 'C', scopes: ['source.shell', 'variable.other.bracket.shell', 'variable.other.bracket.shell']
  expect(tokens[3]).toEqual value: '}', scopes: ['source.shell', 'variable.other.bracket.shell', 'variable.other.bracket.shell', 'punctuation.definition.variable.shell']
  expect(tokens[4]).toEqual value: '}', scopes: ['source.shell', 'variable.other.bracket.shell', 'punctuation.definition.variable.shell']
it "tokenizes case blocks", ->
  # "esac" used as a pattern word must not terminate the case block.
  {tokens} = grammar.tokenizeLine('case word in esac);; esac')
  expect(tokens[0]).toEqual value: 'case', scopes: ['source.shell', 'meta.scope.case-block.shell', 'keyword.control.shell']
  expect(tokens[1]).toEqual value: ' word ', scopes: ['source.shell', 'meta.scope.case-block.shell']
  expect(tokens[2]).toEqual value: 'in', scopes: ['source.shell', 'meta.scope.case-block.shell', 'meta.scope.case-body.shell', 'keyword.control.shell']
  expect(tokens[3]).toEqual value: ' ', scopes: ['source.shell', 'meta.scope.case-block.shell', 'meta.scope.case-body.shell']
  expect(tokens[4]).toEqual value: 'esac', scopes: ['source.shell', 'meta.scope.case-block.shell', 'meta.scope.case-body.shell', 'meta.scope.case-clause.shell', 'meta.scope.case-pattern.shell']
  expect(tokens[5]).toEqual value: ')', scopes: ['source.shell', 'meta.scope.case-block.shell', 'meta.scope.case-body.shell', 'meta.scope.case-clause.shell', 'meta.scope.case-pattern.shell', 'punctuation.definition.case-pattern.shell']
  expect(tokens[6]).toEqual value: ';;', scopes: ['source.shell', 'meta.scope.case-block.shell', 'meta.scope.case-body.shell', 'meta.scope.case-clause.shell', 'punctuation.terminator.case-clause.shell']
  expect(tokens[7]).toEqual value: ' ', scopes: ['source.shell', 'meta.scope.case-block.shell', 'meta.scope.case-body.shell']
  expect(tokens[8]).toEqual value: 'esac', scopes: ['source.shell', 'meta.scope.case-block.shell', 'keyword.control.shell']

it "does not confuse strings and functions", ->
  {tokens} = grammar.tokenizeLine('echo "()"')
  expect(tokens[0]).toEqual value: 'echo', scopes: ['source.shell', 'support.function.builtin.shell']
  expect(tokens[2]).toEqual value: '"', scopes: ['source.shell', 'string.quoted.double.shell', 'punctuation.definition.string.begin.shell']
  expect(tokens[3]).toEqual value: '()', scopes: ['source.shell', 'string.quoted.double.shell']
  expect(tokens[4]).toEqual value: '"', scopes: ['source.shell', 'string.quoted.double.shell', 'punctuation.definition.string.end.shell']
describe "indentation", ->
  editor = null

  beforeEach ->
    editor = buildTextEditor()
    editor.setGrammar(grammar)

  # Auto-indent the whole buffer and assert every line keeps its indent level.
  expectPreservedIndentation = (text) ->
    editor.setText(text)
    editor.autoIndentBufferRows(0, editor.getLineCount() - 1)

    expectedLines = text.split("\n")
    actualLines = editor.getText().split("\n")
    for actualLine, i in actualLines
      expect([
        actualLine,
        editor.indentLevelForLine(actualLine)
      ]).toEqual([
        expectedLines[i],
        editor.indentLevelForLine(expectedLines[i])
      ])

  # NOTE(review): the diff renderer stripped the snippets' internal
  # indentation; bodies below are reconstructed at 2-space shell indent —
  # confirm against the original spec if levels matter.
  it "indents semicolon-style conditional", ->
    expectPreservedIndentation """
      if [ $? -eq 0 ]; then
        echo "0"
      elif [ $? -eq 1 ]; then
        echo "1"
      else
        echo "other"
      fi
    """

  it "indents newline-style conditional", ->
    expectPreservedIndentation """
      if [ $? -eq 0 ]
      then
        echo "0"
      elif [ $? -eq 1 ]
      then
        echo "1"
      else
        echo "other"
      fi
    """

  it "indents semicolon-style while loop", ->
    expectPreservedIndentation """
      while [ $x -gt 0 ]; do
        x=$(($x-1))
      done
    """

  it "indents newline-style while loop", ->
    expectPreservedIndentation """
      while [ $x -gt 0 ]
      do
        x=$(($x-1))
      done
    """
describe "firstLineMatch", ->
  it "recognises interpreter directives", ->
    valid = """
      #!/bin/sh
      #!/usr/sbin/env bash
      #!/usr/bin/bash foo=bar/
      #!/usr/sbin/ksh foo bar baz
      #!/usr/bin/dash perl
      #!/usr/bin/env bin/sh
      #!/usr/bin/rc
      #!/bin/env rc
      #!/usr/bin/bash --script=usr/bin
      #! /usr/bin/env A=003 B=149 C=150 D=xzd E=base64 F=tar G=gz H=head I=tail bash
      #!\t/usr/bin/env --foo=bar bash --quu=quux
      #! /usr/bin/bash
      #!/usr/bin/env bash
    """
    for line in valid.split /\n/
      expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull()

    invalid = """
      \x20#!/usr/sbin/bash
      \t#!/usr/sbin/bash
      #!/usr/bin/env-bash/node-env/
      #!/usr/bin/env-bash
      #! /usr/binbash
      #! /usr/arc
      #!\t/usr/bin/env --bash=bar
    """
    for line in invalid.split /\n/
      expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull()

  it "recognises Emacs modelines", ->
    valid = """
      #-*-shell-script-*-
      #-*-mode:shell-script-*-
      /* -*-sh-*- */
      // -*- SHELL-SCRIPT -*-
      /* -*- mode:shell-script -*- */
      // -*- font:bar;mode:sh -*-
      // -*- font:bar;mode:shell-script;foo:bar; -*-
      // -*-font:mode;mode:SH-*-
      // -*- foo:bar mode: sh bar:baz -*-
      " -*-foo:bar;mode:sh;bar:foo-*- ";
      " -*-font-mode:foo;mode:shell-script;foo-bar:quux-*-"
      "-*-font:x;foo:bar; mode : sh;bar:foo;foooooo:baaaaar;fo:ba;-*-";
      "-*- font:x;foo : bar ; mode : sH ; bar : foo ; foooooo:baaaaar;fo:ba-*-";
    """
    for line in valid.split /\n/
      expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull()

    invalid = """
      /* --*sh-*- */
      /* -*-- sh -*-
      /* -*- -- sh -*-
      /* -*- shell-scripts -;- -*-
      // -*- SSSSSSSSSH -*-
      // -*- SH; -*-
      // -*- sh-stuff -*-
      /* -*- model:sh -*-
      /* -*- indent-mode:sh -*-
      // -*- font:mode;SH -*-
      // -*- mode: -*- SH
      // -*- mode: secret-sh -*-
      // -*-font:mode;mode:sh--*-
    """
    for line in invalid.split /\n/
      expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull()

  it "recognises Vim modelines", ->
    valid = """
      vim: se filetype=sh:
      # vim: se ft=sh:
      # vim: set ft=sh:
      # vim: set filetype=sh:
      # vim: ft=sh
      # vim: syntax=sH
      # vim: se syntax=SH:
      # ex: syntax=SH
      # vim:ft=sh
      # vim600: ft=sh
      # vim>600: set ft=sh:
      # vi:noai:sw=3 ts=6 ft=sh
      # vi::::::::::noai:::::::::::: ft=sh
      # vim:ts=4:sts=4:sw=4:noexpandtab:ft=sh
      # vi:: noai : : : : sw =3 ts =6 ft =sh
      # vim: ts=4: pi sts=4: ft=sh: noexpandtab: sw=4:
      # vim: ts=4 sts=4: ft=sh noexpandtab:
      # vim:noexpandtab sts=4 ft=sh ts=4
      # vim:noexpandtab:ft=sh
      # vim:ts=4:sts=4 ft=sh:noexpandtab:\x20
      # vim:noexpandtab titlestring=hi\|there\\\\ ft=sh ts=4
    """
    for line in valid.split /\n/
      expect(grammar.firstLineRegex.findNextMatchSync(line)).not.toBeNull()

    invalid = """
      ex: se filetype=sh:
      _vi: se filetype=sh:
      vi: se filetype=sh
      # vim set ft=ssh
      # vim: soft=sh
      # vim: hairy-syntax=sh:
      # vim set ft=sh:
      # vim: setft=sh:
      # vim: se ft=sh backupdir=tmp
      # vim: set ft=sh set cmdheight=1
      # vim:noexpandtab sts:4 ft:sh ts:4
      # vim:noexpandtab titlestring=hi\\|there\\ ft=sh ts=4
      # vim:noexpandtab titlestring=hi\\|there\\\\\\ ft=sh ts=4
    """
    for line in invalid.split /\n/
      expect(grammar.firstLineRegex.findNextMatchSync(line)).toBeNull()

Some files were not shown because too many files have changed in this diff Show More