From 477b075db5fe001a690128adb408ff1e17ac6c3f Mon Sep 17 00:00:00 2001
From: steven nguyen
Date: Sat, 25 Jun 2022 19:58:57 +0000
Subject: [PATCH] =?UTF-8?q?=E2=9E=A1=EF=B8=8F=20Migrate=20all=20language?=
 =?UTF-8?q?=20packages?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

To make https://github.com/atom-community/atom/pull/386 reviewable, that PR
will be separated into many simpler PRs. This is one of them.

This first commit does the following:
- update package.json
- update package-lock.json using `./script/build`, which also seems to update
  `apm/package-lock.json`
- update packages/README.md
- clone all language packages. Specifically:
  - `mkdir packages/language-`
  - `cd packages/about`
  - For all languages:
    - `cd ../language-<>`
    - `git clone language-<>`
    - move all files except `.git` from `language-<>/language-<>` to `language-<>`
    - delete `language-<>/language-<>`

Since I at first accidentally updated `dependencies` and only then
`packageDependencies`, and the versions of language-c, language-css,
language-go, language-javascript, and language-sass don't match between the
two, `dependencies` appears to have been reset for those packages.
[Those repos happen to be precisely the ones that use tree-sitter v19](https://github.com/icecream17/atom-update-backlog/blob/main/Languages.md)
(sans language-sass), which [currently breaks Atom](https://github.com/atom/atom/issues/22129).
So even though their repos are now in `packages`, **I've decided not to use
them**. This is done by updating `packageDependencies` only for the
non-breaking languages.
---
 apm/package-lock.json | 570 +- package-lock.json | 152 +- package.json | 116 +- packages/README.md | 66 +- packages/language-c/.github/no-response.yml | 15 + .../language-c/.github/workflows/main.yml | 27 + packages/language-c/.gitignore | 1 + packages/language-c/CONTRIBUTING.md | 1 + packages/language-c/ISSUE_TEMPLATE.md | 40 + packages/language-c/LICENSE.md | 31 + packages/language-c/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-c/README.md | 10 + packages/language-c/coffeelint.json | 37 + packages/language-c/grammars/c++.cson | 424 ++ packages/language-c/grammars/c.cson | 1799 ++++++ .../language-c/grammars/tree-sitter-c.cson | 185 + .../language-c/grammars/tree-sitter-cpp.cson | 251 + packages/language-c/lib/main.js | 12 + packages/language-c/package-lock.json | 398 ++ packages/language-c/package.json | 29 + packages/language-c/settings/language-c.cson | 19 + packages/language-c/snippets/language-c.cson | 113 + packages/language-c/spec/c-spec.coffee | 1140 ++++ packages/language-clojure/.coffeelintignore | 1 + .../language-clojure/.github/no-response.yml | 15 + .../language-clojure/.github/workflows/ci.yml | 23 + packages/language-clojure/.gitignore | 1 + packages/language-clojure/ISSUE_TEMPLATE.md | 40 + packages/language-clojure/LICENSE.md | 48 + .../language-clojure/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-clojure/README.md | 9 + packages/language-clojure/coffeelint.json | 37 + .../language-clojure/grammars/clojure.cson | 398 ++ packages/language-clojure/package.json | 21 + .../settings/language-clojure.cson | 5 + .../snippets/language-clojure.cson | 111 + .../language-clojure/spec/clojure-spec.coffee | 391 ++ .../.github/no-response.yml | 15 + .../.github/workflows/ci.yml | 23 + packages/language-coffee-script/.gitignore | 2 + .../language-coffee-script/CONTRIBUTING.md | 1 + .../language-coffee-script/ISSUE_TEMPLATE.md | 40 + packages/language-coffee-script/LICENSE.md | 49 + .../PULL_REQUEST_TEMPLATE.md | 
28 + packages/language-coffee-script/README.md | 9 + .../language-coffee-script/coffeelint.json | 37 + .../grammars/coffeescript (literate).cson | 724 +++ .../grammars/coffeescript.cson | 1235 ++++ .../language-coffee-script/package-lock.json | 156 + packages/language-coffee-script/package.json | 21 + .../settings/language-coffee-script.cson | 19 + .../snippets/language-coffee-script.cson | 89 + .../spec/coffee-script-literate-spec.coffee | 122 + .../spec/coffee-script-spec.coffee | 1489 +++++ packages/language-csharp/.coffeelintignore | 1 + .../language-csharp/.github/no-response.yml | 15 + .../language-csharp/.github/workflows/ci.yml | 23 + packages/language-csharp/.gitignore | 1 + packages/language-csharp/ISSUE_TEMPLATE.md | 40 + packages/language-csharp/LICENSE.md | 26 + .../language-csharp/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-csharp/README.md | 9 + packages/language-csharp/grammars/cake.cson | 14 + packages/language-csharp/grammars/csharp.cson | 4195 ++++++++++++ packages/language-csharp/grammars/csx.cson | 14 + packages/language-csharp/package.json | 17 + packages/language-csharp/scripts/converter.py | 19 + .../settings/language-csharp.cson | 5 + .../snippets/language-csharp.cson | 142 + .../language-csharp/spec/grammar-spec.coffee | 17 + packages/language-css/.github/no-response.yml | 15 + .../language-css/.github/workflows/main.yml | 28 + packages/language-css/.gitignore | 1 + packages/language-css/CONTRIBUTING.md | 1 + packages/language-css/ISSUE_TEMPLATE.md | 40 + packages/language-css/LICENSE.md | 31 + .../language-css/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-css/README.md | 10 + packages/language-css/coffeelint.json | 37 + packages/language-css/grammars/css.cson | 2135 +++++++ .../grammars/tree-sitter-css.cson | 107 + packages/language-css/package-lock.json | 169 + packages/language-css/package.json | 27 + .../language-css/settings/language-css.cson | 43 + .../language-css/snippets/language-css.cson | 51 + packages/language-css/spec/css-spec.coffee | 3638 +++++++++++ packages/language-gfm/.github/no-response.yml | 15 + .../language-gfm/.github/workflows/ci.yml | 23 + packages/language-gfm/.gitignore | 1 + packages/language-gfm/CONTRIBUTING.md | 1 + packages/language-gfm/ISSUE_TEMPLATE.md | 40 + packages/language-gfm/LICENSE.md | 20 + .../language-gfm/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-gfm/README.md | 6 + packages/language-gfm/coffeelint.json | 37 + packages/language-gfm/grammars/gfm.json | 1926 ++++++ packages/language-gfm/package.json | 14 + packages/language-gfm/settings/gfm.cson | 5 + packages/language-gfm/snippets/gfm.cson | 42 + packages/language-gfm/spec/gfm-spec.coffee | 897 +++ packages/language-git/.coffeelintignore | 1 + packages/language-git/.github/no-response.yml | 15 + .../language-git/.github/workflows/ci.yml | 23 + packages/language-git/.gitignore | 4 + packages/language-git/CONTRIBUTING.md | 1 + packages/language-git/ISSUE_TEMPLATE.md | 40 + packages/language-git/LICENSE.md | 47 + .../language-git/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-git/README.md | 32 + packages/language-git/coffeelint.json | 37 + .../grammars/git commit message.cson | 141 + .../language-git/grammars/git config.cson | 99 + .../grammars/git rebase message.cson | 38 + packages/language-git/package.json | 21 + .../language-git/settings/language-git.cson | 7 + .../language-git/snippets/language-git.cson | 8 + packages/language-git/spec/git-spec.coffee | 196 + packages/language-go/.coffeelintignore | 1 + packages/language-go/.github/no-response.yml 
| 15 + .../language-go/.github/workflows/main.yml | 28 + packages/language-go/.gitignore | 1 + packages/language-go/CONTRIBUTING.md | 1 + packages/language-go/ISSUE_TEMPLATE.md | 40 + packages/language-go/LICENSE.md | 72 + packages/language-go/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-go/README.md | 8 + packages/language-go/coffeelint.json | 37 + packages/language-go/grammars/go.cson | 624 ++ packages/language-go/grammars/gohtml.cson | 13 + packages/language-go/grammars/gomod.cson | 55 + packages/language-go/grammars/gosum.cson | 17 + packages/language-go/grammars/gotemplate.cson | 101 + .../language-go/grammars/tree-sitter-go.cson | 130 + packages/language-go/package-lock.json | 380 ++ packages/language-go/package.json | 27 + .../language-go/settings/language-go.cson | 6 + .../language-go/snippets/language-go.cson | 140 + packages/language-go/spec/go-spec.coffee | 977 +++ .../language-go/spec/language-go-spec.coffee | 62 + packages/language-html/.coffeelintignore | 1 + .../language-html/.github/no-response.yml | 15 + .../language-html/.github/workflows/main.yml | 27 + packages/language-html/.gitignore | 1 + packages/language-html/CONTRIBUTING.md | 1 + packages/language-html/ISSUE_TEMPLATE.md | 40 + packages/language-html/LICENSE.md | 31 + .../language-html/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-html/README.md | 10 + packages/language-html/coffeelint.json | 37 + packages/language-html/grammars/html.cson | 779 +++ .../grammars/tree-sitter-ejs.cson | 36 + .../grammars/tree-sitter-erb.cson | 36 + .../grammars/tree-sitter-html.cson | 56 + packages/language-html/lib/main.js | 39 + packages/language-html/package-lock.json | 202 + packages/language-html/package.json | 31 + .../language-html/settings/language-html.cson | 23 + .../language-html/snippets/language-html.cson | 667 ++ .../spec/fixtures/syntax_test_html.html | 27 + .../syntax_test_html_template_fragments.html | 40 + packages/language-html/spec/html-spec.coffee | 832 +++ .../language-html/spec/tree-sitter-spec.js | 81 + packages/language-hyperlink/.coffeelintignore | 1 + .../.github/no-response.yml | 15 + .../.github/workflows/ci.yml | 23 + packages/language-hyperlink/.gitignore | 1 + packages/language-hyperlink/CONTRIBUTING.md | 1 + packages/language-hyperlink/ISSUE_TEMPLATE.md | 40 + packages/language-hyperlink/LICENSE.md | 31 + .../PULL_REQUEST_TEMPLATE.md | 28 + packages/language-hyperlink/README.md | 11 + packages/language-hyperlink/coffeelint.json | 37 + .../grammars/hyperlink.cson | 16 + packages/language-hyperlink/package.json | 21 + .../spec/fixtures/test-grammar.cson | 10 + .../spec/hyperlink-spec.coffee | 92 + packages/language-java/.coffeelintignore | 1 + .../language-java/.github/no-response.yml | 15 + .../language-java/.github/workflows/build.yml | 88 + packages/language-java/.gitignore | 1 + packages/language-java/CONTRIBUTING.md | 1 + packages/language-java/ISSUE_TEMPLATE.md | 40 + packages/language-java/LICENSE.md | 31 + .../language-java/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-java/README.md | 9 + packages/language-java/coffeelint.json | 37 + .../grammars/java server pages (jsp).cson | 893 +++ packages/language-java/grammars/java.cson | 1676 +++++ .../grammars/javaproperties.cson | 72 + .../grammars/junit test report.cson | 75 + .../grammars/tree-sitter-java.cson | 306 + .../unified expression language (el).cson | 88 + packages/language-java/package-lock.json | 169 + packages/language-java/package.json | 24 + .../language-java/settings/language-java.cson | 34 + 
.../language-java/snippets/language-java.cson | 134 + packages/language-java/spec/java-spec.coffee | 3142 +++++++++ .../spec/tree-sitter-java-spec.coffee | 1147 ++++ .../language-java/spec/unified-el-spec.coffee | 189 + .../.github/no-response.yml | 15 + .../.github/workflows/ci.yml | 45 + packages/language-javascript/.gitignore | 2 + packages/language-javascript/CONTRIBUTING.md | 1 + .../language-javascript/ISSUE_TEMPLATE.md | 40 + packages/language-javascript/LICENSE.md | 31 + .../PULL_REQUEST_TEMPLATE.md | 28 + packages/language-javascript/README.md | 12 + packages/language-javascript/appveyor.yml | 6 + packages/language-javascript/coffeelint.json | 37 + .../grammars/javascript.cson | 2028 ++++++ .../language-javascript/grammars/jsdoc.cson | 524 ++ ...r expression replacement (javascript).cson | 22 + .../regular expressions (javascript).cson | 123 + .../grammars/tree-sitter-javascript.cson | 262 + .../grammars/tree-sitter-jsdoc.cson | 16 + .../grammars/tree-sitter-regex.cson | 17 + packages/language-javascript/lib/main.js | 82 + packages/language-javascript/package.json | 30 + .../settings/language-javascript.cson | 13 + .../snippets/language-javascript.cson | 130 + .../spec/javascript-spec.coffee | 2501 ++++++++ .../spec/jsdoc-spec.coffee | 1537 +++++ ...regular-expression-replacement-spec.coffee | 87 + packages/language-json/.coffeelintignore | 1 + .../language-json/.github/no-response.yml | 15 + .../language-json/.github/workflows/main.yml | 27 + packages/language-json/.gitignore | 1 + packages/language-json/CONTRIBUTING.md | 1 + packages/language-json/ISSUE_TEMPLATE.md | 40 + packages/language-json/LICENSE.md | 31 + .../language-json/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-json/README.md | 8 + packages/language-json/coffeelint.json | 37 + packages/language-json/grammars/json.cson | 165 + .../grammars/tree-sitter-json.cson | 76 + packages/language-json/package-lock.json | 169 + packages/language-json/package.json | 24 + .../language-json/settings/language-json.cson | 5 + packages/language-json/spec/json-spec.coffee | 110 + .../language-less/.github/no-response.yml | 15 + .../language-less/.github/workflows/ci.yml | 23 + packages/language-less/.gitignore | 1 + packages/language-less/CONTRIBUTING.md | 1 + packages/language-less/ISSUE_TEMPLATE.md | 40 + packages/language-less/LICENSE.md | 46 + .../language-less/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-less/README.md | 10 + packages/language-less/coffeelint.json | 37 + packages/language-less/grammars/less.cson | 454 ++ packages/language-less/package.json | 24 + .../language-less/settings/language-less.cson | 487 ++ packages/language-less/spec/less-spec.coffee | 387 ++ packages/language-less/update.coffee | 49 + packages/language-make/.coffeelintignore | 1 + .../language-make/.github/no-response.yml | 15 + .../language-make/.github/workflows/ci.yml | 23 + packages/language-make/.gitignore | 1 + packages/language-make/CONTRIBUTING.md | 1 + packages/language-make/ISSUE_TEMPLATE.md | 40 + packages/language-make/LICENSE.md | 31 + .../language-make/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-make/README.md | 10 + packages/language-make/coffeelint.json | 37 + packages/language-make/grammars/makefile.cson | 446 ++ packages/language-make/package.json | 21 + .../language-make/settings/language-make.cson | 5 + packages/language-make/spec/make-spec.coffee | 251 + packages/language-mustache/.coffeelintignore | 1 + .../language-mustache/.github/no-response.yml | 15 + .../.github/workflows/ci.yml | 23 + 
packages/language-mustache/.gitignore | 1 + packages/language-mustache/ISSUE_TEMPLATE.md | 40 + packages/language-mustache/LICENSE.md | 20 + .../PULL_REQUEST_TEMPLATE.md | 28 + packages/language-mustache/README.md | 8 + packages/language-mustache/coffeelint.json | 37 + .../language-mustache/grammars/mustache.cson | 114 + .../grammars/sql with mustaches.cson | 12 + packages/language-mustache/package.json | 21 + .../spec/mustache-spec.coffee | 104 + .../language-objective-c/.coffeelintignore | 1 + .../.github/no-response.yml | 15 + .../.github/workflows/ci.yml | 23 + packages/language-objective-c/.gitignore | 1 + packages/language-objective-c/CONTRIBUTING.md | 1 + .../language-objective-c/ISSUE_TEMPLATE.md | 40 + packages/language-objective-c/LICENSE.md | 31 + .../PULL_REQUEST_TEMPLATE.md | 28 + packages/language-objective-c/README.md | 10 + packages/language-objective-c/coffeelint.json | 37 + .../grammars/objective-c++.cson | 15 + .../grammars/objective-c.cson | 831 +++ .../grammars/strings file.cson | 44 + packages/language-objective-c/package.json | 21 + .../settings/language-objective-c.cson | 37 + .../snippets/language-objective-c.cson | 52 + .../spec/objective-c-spec.coffee | 49 + .../language-perl/.github/no-response.yml | 15 + .../language-perl/.github/workflows/ci.yml | 23 + packages/language-perl/.gitignore | 1 + packages/language-perl/CONTRIBUTING.md | 1 + packages/language-perl/ISSUE_TEMPLATE.md | 40 + packages/language-perl/LICENSE.md | 31 + .../language-perl/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-perl/README.md | 8 + packages/language-perl/coffeelint.json | 37 + packages/language-perl/grammars/perl 6.cson | 344 + packages/language-perl/grammars/perl.cson | 2412 +++++++ packages/language-perl/package.json | 21 + .../language-perl/settings/language-perl.cson | 14 + .../language-perl/snippets/language-perl.cson | 49 + .../spec/grammar-perl6-spec.coffee | 293 + .../language-perl/spec/grammar-spec.coffee | 1543 +++++ packages/language-php/.coffeelintignore | 1 + packages/language-php/.github/no-response.yml | 15 + .../language-php/.github/workflows/main.yml | 27 + packages/language-php/.gitignore | 2 + packages/language-php/CONTRIBUTING.md | 1 + packages/language-php/ISSUE_TEMPLATE.md | 40 + packages/language-php/LICENSE.md | 31 + .../language-php/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-php/README.md | 8 + packages/language-php/coffeelint.json | 37 + packages/language-php/grammars/html.cson | 114 + packages/language-php/grammars/php.cson | 4209 ++++++++++++ packages/language-php/package-lock.json | 156 + packages/language-php/package.json | 21 + .../language-php/settings/language-php.cson | 3347 ++++++++++ .../language-php/snippets/language-php.cson | 160 + packages/language-php/spec/html-spec.coffee | 296 + packages/language-php/spec/php-spec.coffee | 4155 ++++++++++++ .../language-property-list/.coffeelintignore | 1 + .../.github/no-response.yml | 15 + .../.github/workflows/ci.yml | 21 + packages/language-property-list/.gitignore | 1 + .../language-property-list/CONTRIBUTING.md | 1 + .../language-property-list/ISSUE_TEMPLATE.md | 40 + packages/language-property-list/LICENSE.md | 31 + .../PULL_REQUEST_TEMPLATE.md | 28 + packages/language-property-list/README.md | 10 + .../language-property-list/coffeelint.json | 37 + .../grammars/property list (old-style).cson | 287 + .../grammars/property list (xml).cson | 529 ++ packages/language-property-list/package.json | 21 + .../settings/language-property-list.cson | 6 + .../snippets/language-property-list.cson | 34 + 
packages/language-python/.coffeelintignore | 1 + .../language-python/.github/no-response.yml | 15 + .../language-python/.github/workflows/ci.yml | 51 + packages/language-python/.gitignore | 1 + packages/language-python/CONTRIBUTING.md | 1 + packages/language-python/ISSUE_TEMPLATE.md | 40 + packages/language-python/LICENSE.md | 31 + .../language-python/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-python/README.md | 9 + packages/language-python/appveyor.yml | 6 + packages/language-python/coffeelint.json | 37 + .../grammars/python-console.cson | 18 + .../grammars/python-traceback.cson | 34 + packages/language-python/grammars/python.cson | 2301 +++++++ .../regular expressions (python).cson | 147 + .../grammars/tree-sitter-python.cson | 195 + packages/language-python/package-lock.json | 188 + packages/language-python/package.json | 28 + .../settings/language-python.cson | 9 + .../snippets/language-python.cson | 121 + .../fixtures/grammar/syntax_test_python.py | 14 + .../grammar/syntax_test_python_functions.py | 88 + .../grammar/syntax_test_python_lambdas.py | 41 + .../grammar/syntax_test_python_typing.py | 23 + .../spec/language-python-spec.coffee | 83 + .../spec/python-regex-spec.coffee | 52 + .../language-python/spec/python-spec.coffee | 759 +++ .../language-ruby-on-rails/.coffeelintignore | 1 + .../.github/no-response.yml | 15 + .../.github/workflows/ci.yml | 23 + packages/language-ruby-on-rails/.gitignore | 5 + .../language-ruby-on-rails/CONTRIBUTING.md | 1 + .../language-ruby-on-rails/ISSUE_TEMPLATE.md | 40 + packages/language-ruby-on-rails/LICENSE.md | 45 + .../PULL_REQUEST_TEMPLATE.md | 28 + packages/language-ruby-on-rails/README.md | 10 + .../language-ruby-on-rails/coffeelint.json | 37 + .../grammars/html (rails).cson | 42 + .../grammars/javascript (rails).cson | 40 + .../language-ruby-on-rails/grammars/rjs.cson | 10 + .../grammars/ruby on rails.cson | 194 + .../grammars/sql (rails).cson | 27 + packages/language-ruby-on-rails/package.json | 21 + .../snippets/language-ruby-on-rails.cson | 558 ++ .../spec/grammar-spec.coffee | 29 + .../spec/snippets-spec.coffee | 29 + packages/language-ruby/.coffeelintignore | 1 + .../language-ruby/.github/no-response.yml | 15 + .../language-ruby/.github/workflows/ci.yml | 51 + packages/language-ruby/.gitignore | 2 + packages/language-ruby/CONTRIBUTING.md | 1 + packages/language-ruby/ISSUE_TEMPLATE.md | 40 + packages/language-ruby/LICENSE.md | 31 + .../language-ruby/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-ruby/README.md | 9 + packages/language-ruby/appveyor.yml | 6 + packages/language-ruby/coffeelint.json | 37 + packages/language-ruby/grammars/gemfile.cson | 23 + .../grammars/html (ruby - erb).cson | 121 + packages/language-ruby/grammars/ruby.cson | 2491 ++++++++ .../grammars/tree-sitter-ruby.cson | 256 + packages/language-ruby/lib/main.js | 15 + packages/language-ruby/package.json | 22 + .../language-ruby/settings/language-ruby.cson | 11 + .../language-ruby/snippets/language-ruby.cson | 351 + packages/language-ruby/spec/erb-spec.coffee | 28 + .../language-ruby/spec/gemfile-spec.coffee | 56 + packages/language-ruby/spec/ruby-spec.coffee | 1060 +++ .../language-ruby/spec/tree-sitter-spec.js | 133 + .../language-sass/.github/no-response.yml | 15 + .../language-sass/.github/workflows/main.yml | 27 + packages/language-sass/.gitignore | 3 + packages/language-sass/CONTRIBUTING.md | 1 + packages/language-sass/ISSUE_TEMPLATE.md | 40 + packages/language-sass/LICENSE.md | 47 + .../language-sass/PULL_REQUEST_TEMPLATE.md | 28 + 
packages/language-sass/README.md | 8 + packages/language-sass/coffeelint.json | 37 + packages/language-sass/grammars/sass.cson | 891 +++ packages/language-sass/grammars/sassdoc.cson | 409 ++ packages/language-sass/grammars/scss.cson | 1704 +++++ packages/language-sass/package-lock.json | 162 + packages/language-sass/package.json | 22 + .../language-sass/settings/language-sass.cson | 748 +++ .../language-sass/snippets/language-sass.cson | 54 + packages/language-sass/spec/sass-spec.coffee | 627 ++ .../language-sass/spec/sassdoc-spec.coffee | 62 + packages/language-sass/spec/scss-spec.coffee | 1126 ++++ packages/language-sass/spec/scss-spec.js | 19 + .../.github/no-response.yml | 15 + .../.github/workflows/main.yml | 28 + packages/language-shellscript/.gitignore | 1 + packages/language-shellscript/CONTRIBUTING.md | 1 + .../language-shellscript/ISSUE_TEMPLATE.md | 40 + packages/language-shellscript/LICENSE.md | 31 + .../PULL_REQUEST_TEMPLATE.md | 28 + packages/language-shellscript/README.md | 8 + packages/language-shellscript/coffeelint.json | 37 + .../grammars/shell-session.cson | 44 + .../grammars/shell-unix-bash.cson | 1137 ++++ .../grammars/tree-sitter-bash.cson | 144 + .../language-shellscript/package-lock.json | 566 ++ packages/language-shellscript/package.json | 27 + .../settings/language-shellscript.cson | 6 + .../snippets/language-shellscript.cson | 40 + .../spec/shell-session-spec.coffee | 75 + .../spec/shell-unix-bash-spec.coffee | 458 ++ packages/language-source/.coffeelintignore | 1 + .../language-source/.github/no-response.yml | 15 + .../language-source/.github/workflows/ci.yml | 21 + packages/language-source/.gitignore | 1 + packages/language-source/CONTRIBUTING.md | 1 + packages/language-source/ISSUE_TEMPLATE.md | 40 + packages/language-source/LICENSE.md | 31 + .../language-source/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-source/README.md | 10 + packages/language-source/coffeelint.json | 37 + packages/language-source/package.json | 21 + .../settings/language-source.cson | 6 + packages/language-sql/.coffeelintignore | 1 + packages/language-sql/.github/no-response.yml | 15 + .../language-sql/.github/workflows/ci.yml | 23 + packages/language-sql/.gitignore | 1 + packages/language-sql/CONTRIBUTING.md | 1 + packages/language-sql/ISSUE_TEMPLATE.md | 40 + packages/language-sql/LICENSE.md | 31 + .../language-sql/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-sql/README.md | 8 + packages/language-sql/coffeelint.json | 37 + packages/language-sql/grammars/sql.cson | 451 ++ packages/language-sql/package.json | 21 + .../language-sql/settings/language-sql.cson | 6 + .../language-sql/spec/grammar-spec.coffee | 232 + packages/language-text/.coffeelintignore | 1 + .../language-text/.github/no-response.yml | 15 + .../language-text/.github/workflows/ci.yml | 23 + packages/language-text/.gitignore | 1 + packages/language-text/CONTRIBUTING.md | 1 + packages/language-text/ISSUE_TEMPLATE.md | 40 + packages/language-text/LICENSE.md | 31 + .../language-text/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-text/README.md | 11 + packages/language-text/coffeelint.json | 37 + .../language-text/grammars/plain text.cson | 33 + packages/language-text/package.json | 21 + .../language-text/snippets/language-text.cson | 7 + .../language-text/spec/plain-text-spec.coffee | 13 + .../language-todo/.github/no-response.yml | 15 + .../language-todo/.github/workflows/ci.yml | 23 + packages/language-todo/.gitignore | 1 + packages/language-todo/CONTRIBUTING.md | 1 + packages/language-todo/ISSUE_TEMPLATE.md | 
40 + packages/language-todo/LICENSE.md | 31 + .../language-todo/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-todo/README.md | 9 + packages/language-todo/coffeelint.json | 37 + packages/language-todo/grammars/todo.cson | 15 + packages/language-todo/package.json | 21 + packages/language-todo/snippets/todo.cson | 397 ++ packages/language-todo/spec/todo-spec.coffee | 13 + packages/language-toml/.coffeelintignore | 1 + .../language-toml/.github/no-response.yml | 15 + .../language-toml/.github/workflows/ci.yml | 23 + packages/language-toml/.gitignore | 1 + packages/language-toml/CONTRIBUTING.md | 1 + packages/language-toml/ISSUE_TEMPLATE.md | 40 + packages/language-toml/LICENSE.md | 20 + .../language-toml/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-toml/README.md | 8 + packages/language-toml/coffeelint.json | 37 + packages/language-toml/grammars/toml.cson | 295 + packages/language-toml/package-lock.json | 156 + packages/language-toml/package.json | 13 + .../language-toml/settings/language-toml.cson | 3 + packages/language-toml/spec/toml-spec.coffee | 348 + .../.github/workflows/ci.yml | 21 + packages/language-typescript/.gitignore | 1 + packages/language-typescript/CONTRIBUTING.md | 1 + .../language-typescript/ISSUE_TEMPLATE.md | 40 + packages/language-typescript/LICENSE.md | 28 + .../PULL_REQUEST_TEMPLATE.md | 28 + packages/language-typescript/README.md | 8 + .../grammars/TypeScript.json | 5408 ++++++++++++++++ .../grammars/TypeScriptReact.json | 5661 +++++++++++++++++ .../grammars/tree-sitter-flow.cson | 262 + .../grammars/tree-sitter-tsx.cson | 254 + .../grammars/tree-sitter-typescript.cson | 234 + packages/language-typescript/lib/main.js | 62 + .../language-typescript/package-lock.json | 21 + packages/language-typescript/package.json | 25 + .../settings/TypeScript.cson | 12 + .../settings/TypeScriptReact.cson | 19 + .../snippets/language-typescript.cson | 222 + packages/language-xml/.coffeelintignore | 1 + packages/language-xml/.github/no-response.yml | 15 + .../language-xml/.github/workflows/ci.yml | 23 + packages/language-xml/.gitignore | 1 + packages/language-xml/CONTRIBUTING.md | 1 + packages/language-xml/ISSUE_TEMPLATE.md | 40 + packages/language-xml/LICENSE.md | 31 + .../language-xml/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-xml/README.md | 8 + packages/language-xml/coffeelint.json | 37 + packages/language-xml/grammars/xml.cson | 428 ++ packages/language-xml/grammars/xsl.cson | 68 + packages/language-xml/package.json | 21 + .../language-xml/settings/language-xml.cson | 7 + .../language-xml/snippets/language-xml.cson | 16 + packages/language-xml/spec/xml-spec.coffee | 212 + packages/language-yaml/.coffeelintignore | 1 + .../language-yaml/.github/no-response.yml | 15 + .../language-yaml/.github/workflows/ci.yml | 23 + packages/language-yaml/.gitignore | 1 + packages/language-yaml/CONTRIBUTING.md | 1 + packages/language-yaml/ISSUE_TEMPLATE.md | 40 + packages/language-yaml/LICENSE.md | 31 + .../language-yaml/PULL_REQUEST_TEMPLATE.md | 28 + packages/language-yaml/README.md | 10 + packages/language-yaml/coffeelint.json | 37 + packages/language-yaml/grammars/yaml.cson | 405 ++ packages/language-yaml/package.json | 21 + .../language-yaml/settings/language-yaml.cson | 8 + .../language-yaml/spec/fixtures/cloud.config | 6 + packages/language-yaml/spec/yaml-spec.coffee | 1004 +++ 570 files changed, 107088 insertions(+), 526 deletions(-) create mode 100644 packages/language-c/.github/no-response.yml create mode 100644 packages/language-c/.github/workflows/main.yml create mode 
100644 packages/language-c/.gitignore create mode 100644 packages/language-c/CONTRIBUTING.md create mode 100644 packages/language-c/ISSUE_TEMPLATE.md create mode 100644 packages/language-c/LICENSE.md create mode 100644 packages/language-c/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-c/README.md create mode 100644 packages/language-c/coffeelint.json create mode 100644 packages/language-c/grammars/c++.cson create mode 100644 packages/language-c/grammars/c.cson create mode 100644 packages/language-c/grammars/tree-sitter-c.cson create mode 100644 packages/language-c/grammars/tree-sitter-cpp.cson create mode 100644 packages/language-c/lib/main.js create mode 100644 packages/language-c/package-lock.json create mode 100644 packages/language-c/package.json create mode 100644 packages/language-c/settings/language-c.cson create mode 100644 packages/language-c/snippets/language-c.cson create mode 100644 packages/language-c/spec/c-spec.coffee create mode 100644 packages/language-clojure/.coffeelintignore create mode 100644 packages/language-clojure/.github/no-response.yml create mode 100644 packages/language-clojure/.github/workflows/ci.yml create mode 100644 packages/language-clojure/.gitignore create mode 100644 packages/language-clojure/ISSUE_TEMPLATE.md create mode 100644 packages/language-clojure/LICENSE.md create mode 100644 packages/language-clojure/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-clojure/README.md create mode 100644 packages/language-clojure/coffeelint.json create mode 100644 packages/language-clojure/grammars/clojure.cson create mode 100644 packages/language-clojure/package.json create mode 100644 packages/language-clojure/settings/language-clojure.cson create mode 100644 packages/language-clojure/snippets/language-clojure.cson create mode 100644 packages/language-clojure/spec/clojure-spec.coffee create mode 100644 packages/language-coffee-script/.github/no-response.yml create mode 100644 packages/language-coffee-script/.github/workflows/ci.yml create mode 100644 packages/language-coffee-script/.gitignore create mode 100644 packages/language-coffee-script/CONTRIBUTING.md create mode 100644 packages/language-coffee-script/ISSUE_TEMPLATE.md create mode 100644 packages/language-coffee-script/LICENSE.md create mode 100644 packages/language-coffee-script/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-coffee-script/README.md create mode 100644 packages/language-coffee-script/coffeelint.json create mode 100644 packages/language-coffee-script/grammars/coffeescript (literate).cson create mode 100644 packages/language-coffee-script/grammars/coffeescript.cson create mode 100644 packages/language-coffee-script/package-lock.json create mode 100644 packages/language-coffee-script/package.json create mode 100644 packages/language-coffee-script/settings/language-coffee-script.cson create mode 100644 packages/language-coffee-script/snippets/language-coffee-script.cson create mode 100644 packages/language-coffee-script/spec/coffee-script-literate-spec.coffee create mode 100644 packages/language-coffee-script/spec/coffee-script-spec.coffee create mode 100644 packages/language-csharp/.coffeelintignore create mode 100644 packages/language-csharp/.github/no-response.yml create mode 100644 packages/language-csharp/.github/workflows/ci.yml create mode 100644 packages/language-csharp/.gitignore create mode 100644 packages/language-csharp/ISSUE_TEMPLATE.md create mode 100644 packages/language-csharp/LICENSE.md create mode 100644 
packages/language-csharp/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-csharp/README.md create mode 100644 packages/language-csharp/grammars/cake.cson create mode 100644 packages/language-csharp/grammars/csharp.cson create mode 100644 packages/language-csharp/grammars/csx.cson create mode 100644 packages/language-csharp/package.json create mode 100644 packages/language-csharp/scripts/converter.py create mode 100644 packages/language-csharp/settings/language-csharp.cson create mode 100644 packages/language-csharp/snippets/language-csharp.cson create mode 100644 packages/language-csharp/spec/grammar-spec.coffee create mode 100644 packages/language-css/.github/no-response.yml create mode 100644 packages/language-css/.github/workflows/main.yml create mode 100644 packages/language-css/.gitignore create mode 100644 packages/language-css/CONTRIBUTING.md create mode 100644 packages/language-css/ISSUE_TEMPLATE.md create mode 100644 packages/language-css/LICENSE.md create mode 100644 packages/language-css/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-css/README.md create mode 100644 packages/language-css/coffeelint.json create mode 100644 packages/language-css/grammars/css.cson create mode 100644 packages/language-css/grammars/tree-sitter-css.cson create mode 100644 packages/language-css/package-lock.json create mode 100644 packages/language-css/package.json create mode 100644 packages/language-css/settings/language-css.cson create mode 100644 packages/language-css/snippets/language-css.cson create mode 100644 packages/language-css/spec/css-spec.coffee create mode 100644 packages/language-gfm/.github/no-response.yml create mode 100644 packages/language-gfm/.github/workflows/ci.yml create mode 100644 packages/language-gfm/.gitignore create mode 100644 packages/language-gfm/CONTRIBUTING.md create mode 100644 packages/language-gfm/ISSUE_TEMPLATE.md create mode 100644 packages/language-gfm/LICENSE.md create mode 100644 packages/language-gfm/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-gfm/README.md create mode 100644 packages/language-gfm/coffeelint.json create mode 100644 packages/language-gfm/grammars/gfm.json create mode 100644 packages/language-gfm/package.json create mode 100644 packages/language-gfm/settings/gfm.cson create mode 100644 packages/language-gfm/snippets/gfm.cson create mode 100644 packages/language-gfm/spec/gfm-spec.coffee create mode 100644 packages/language-git/.coffeelintignore create mode 100644 packages/language-git/.github/no-response.yml create mode 100644 packages/language-git/.github/workflows/ci.yml create mode 100644 packages/language-git/.gitignore create mode 100644 packages/language-git/CONTRIBUTING.md create mode 100644 packages/language-git/ISSUE_TEMPLATE.md create mode 100644 packages/language-git/LICENSE.md create mode 100644 packages/language-git/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-git/README.md create mode 100644 packages/language-git/coffeelint.json create mode 100644 packages/language-git/grammars/git commit message.cson create mode 100644 packages/language-git/grammars/git config.cson create mode 100644 packages/language-git/grammars/git rebase message.cson create mode 100644 packages/language-git/package.json create mode 100644 packages/language-git/settings/language-git.cson create mode 100644 packages/language-git/snippets/language-git.cson create mode 100644 packages/language-git/spec/git-spec.coffee create mode 100644 packages/language-go/.coffeelintignore create mode 100644 
packages/language-go/.github/no-response.yml create mode 100644 packages/language-go/.github/workflows/main.yml create mode 100644 packages/language-go/.gitignore create mode 100644 packages/language-go/CONTRIBUTING.md create mode 100644 packages/language-go/ISSUE_TEMPLATE.md create mode 100644 packages/language-go/LICENSE.md create mode 100644 packages/language-go/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-go/README.md create mode 100644 packages/language-go/coffeelint.json create mode 100644 packages/language-go/grammars/go.cson create mode 100644 packages/language-go/grammars/gohtml.cson create mode 100644 packages/language-go/grammars/gomod.cson create mode 100644 packages/language-go/grammars/gosum.cson create mode 100644 packages/language-go/grammars/gotemplate.cson create mode 100644 packages/language-go/grammars/tree-sitter-go.cson create mode 100644 packages/language-go/package-lock.json create mode 100644 packages/language-go/package.json create mode 100644 packages/language-go/settings/language-go.cson create mode 100644 packages/language-go/snippets/language-go.cson create mode 100644 packages/language-go/spec/go-spec.coffee create mode 100644 packages/language-go/spec/language-go-spec.coffee create mode 100644 packages/language-html/.coffeelintignore create mode 100644 packages/language-html/.github/no-response.yml create mode 100644 packages/language-html/.github/workflows/main.yml create mode 100644 packages/language-html/.gitignore create mode 100644 packages/language-html/CONTRIBUTING.md create mode 100644 packages/language-html/ISSUE_TEMPLATE.md create mode 100644 packages/language-html/LICENSE.md create mode 100644 packages/language-html/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-html/README.md create mode 100644 packages/language-html/coffeelint.json create mode 100644 packages/language-html/grammars/html.cson create mode 100644 packages/language-html/grammars/tree-sitter-ejs.cson create mode 100644 packages/language-html/grammars/tree-sitter-erb.cson create mode 100644 packages/language-html/grammars/tree-sitter-html.cson create mode 100644 packages/language-html/lib/main.js create mode 100644 packages/language-html/package-lock.json create mode 100644 packages/language-html/package.json create mode 100644 packages/language-html/settings/language-html.cson create mode 100644 packages/language-html/snippets/language-html.cson create mode 100644 packages/language-html/spec/fixtures/syntax_test_html.html create mode 100644 packages/language-html/spec/fixtures/syntax_test_html_template_fragments.html create mode 100644 packages/language-html/spec/html-spec.coffee create mode 100644 packages/language-html/spec/tree-sitter-spec.js create mode 100644 packages/language-hyperlink/.coffeelintignore create mode 100644 packages/language-hyperlink/.github/no-response.yml create mode 100644 packages/language-hyperlink/.github/workflows/ci.yml create mode 100644 packages/language-hyperlink/.gitignore create mode 100644 packages/language-hyperlink/CONTRIBUTING.md create mode 100644 packages/language-hyperlink/ISSUE_TEMPLATE.md create mode 100644 packages/language-hyperlink/LICENSE.md create mode 100644 packages/language-hyperlink/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-hyperlink/README.md create mode 100644 packages/language-hyperlink/coffeelint.json create mode 100644 packages/language-hyperlink/grammars/hyperlink.cson create mode 100644 packages/language-hyperlink/package.json create mode 100644 
packages/language-hyperlink/spec/fixtures/test-grammar.cson create mode 100644 packages/language-hyperlink/spec/hyperlink-spec.coffee create mode 100644 packages/language-java/.coffeelintignore create mode 100644 packages/language-java/.github/no-response.yml create mode 100644 packages/language-java/.github/workflows/build.yml create mode 100644 packages/language-java/.gitignore create mode 100644 packages/language-java/CONTRIBUTING.md create mode 100644 packages/language-java/ISSUE_TEMPLATE.md create mode 100644 packages/language-java/LICENSE.md create mode 100644 packages/language-java/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-java/README.md create mode 100644 packages/language-java/coffeelint.json create mode 100644 packages/language-java/grammars/java server pages (jsp).cson create mode 100644 packages/language-java/grammars/java.cson create mode 100644 packages/language-java/grammars/javaproperties.cson create mode 100644 packages/language-java/grammars/junit test report.cson create mode 100644 packages/language-java/grammars/tree-sitter-java.cson create mode 100644 packages/language-java/grammars/unified expression language (el).cson create mode 100644 packages/language-java/package-lock.json create mode 100644 packages/language-java/package.json create mode 100644 packages/language-java/settings/language-java.cson create mode 100644 packages/language-java/snippets/language-java.cson create mode 100644 packages/language-java/spec/java-spec.coffee create mode 100644 packages/language-java/spec/tree-sitter-java-spec.coffee create mode 100644 packages/language-java/spec/unified-el-spec.coffee create mode 100644 packages/language-javascript/.github/no-response.yml create mode 100644 packages/language-javascript/.github/workflows/ci.yml create mode 100644 packages/language-javascript/.gitignore create mode 100644 packages/language-javascript/CONTRIBUTING.md create mode 100644 packages/language-javascript/ISSUE_TEMPLATE.md create mode 100644 packages/language-javascript/LICENSE.md create mode 100644 packages/language-javascript/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-javascript/README.md create mode 100644 packages/language-javascript/appveyor.yml create mode 100644 packages/language-javascript/coffeelint.json create mode 100644 packages/language-javascript/grammars/javascript.cson create mode 100644 packages/language-javascript/grammars/jsdoc.cson create mode 100644 packages/language-javascript/grammars/regular expression replacement (javascript).cson create mode 100644 packages/language-javascript/grammars/regular expressions (javascript).cson create mode 100644 packages/language-javascript/grammars/tree-sitter-javascript.cson create mode 100644 packages/language-javascript/grammars/tree-sitter-jsdoc.cson create mode 100644 packages/language-javascript/grammars/tree-sitter-regex.cson create mode 100644 packages/language-javascript/lib/main.js create mode 100644 packages/language-javascript/package.json create mode 100644 packages/language-javascript/settings/language-javascript.cson create mode 100644 packages/language-javascript/snippets/language-javascript.cson create mode 100644 packages/language-javascript/spec/javascript-spec.coffee create mode 100644 packages/language-javascript/spec/jsdoc-spec.coffee create mode 100644 packages/language-javascript/spec/regular-expression-replacement-spec.coffee create mode 100644 packages/language-json/.coffeelintignore create mode 100644 packages/language-json/.github/no-response.yml create mode 100644 
packages/language-json/.github/workflows/main.yml create mode 100644 packages/language-json/.gitignore create mode 100644 packages/language-json/CONTRIBUTING.md create mode 100644 packages/language-json/ISSUE_TEMPLATE.md create mode 100644 packages/language-json/LICENSE.md create mode 100644 packages/language-json/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-json/README.md create mode 100644 packages/language-json/coffeelint.json create mode 100644 packages/language-json/grammars/json.cson create mode 100644 packages/language-json/grammars/tree-sitter-json.cson create mode 100644 packages/language-json/package-lock.json create mode 100644 packages/language-json/package.json create mode 100644 packages/language-json/settings/language-json.cson create mode 100644 packages/language-json/spec/json-spec.coffee create mode 100644 packages/language-less/.github/no-response.yml create mode 100644 packages/language-less/.github/workflows/ci.yml create mode 100644 packages/language-less/.gitignore create mode 100644 packages/language-less/CONTRIBUTING.md create mode 100644 packages/language-less/ISSUE_TEMPLATE.md create mode 100644 packages/language-less/LICENSE.md create mode 100644 packages/language-less/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-less/README.md create mode 100644 packages/language-less/coffeelint.json create mode 100644 packages/language-less/grammars/less.cson create mode 100644 packages/language-less/package.json create mode 100644 packages/language-less/settings/language-less.cson create mode 100644 packages/language-less/spec/less-spec.coffee create mode 100644 packages/language-less/update.coffee create mode 100644 packages/language-make/.coffeelintignore create mode 100644 packages/language-make/.github/no-response.yml create mode 100644 packages/language-make/.github/workflows/ci.yml create mode 100644 packages/language-make/.gitignore create mode 100644 packages/language-make/CONTRIBUTING.md create mode 100644 packages/language-make/ISSUE_TEMPLATE.md create mode 100644 packages/language-make/LICENSE.md create mode 100644 packages/language-make/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-make/README.md create mode 100644 packages/language-make/coffeelint.json create mode 100644 packages/language-make/grammars/makefile.cson create mode 100644 packages/language-make/package.json create mode 100644 packages/language-make/settings/language-make.cson create mode 100644 packages/language-make/spec/make-spec.coffee create mode 100644 packages/language-mustache/.coffeelintignore create mode 100644 packages/language-mustache/.github/no-response.yml create mode 100644 packages/language-mustache/.github/workflows/ci.yml create mode 100644 packages/language-mustache/.gitignore create mode 100644 packages/language-mustache/ISSUE_TEMPLATE.md create mode 100644 packages/language-mustache/LICENSE.md create mode 100644 packages/language-mustache/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-mustache/README.md create mode 100644 packages/language-mustache/coffeelint.json create mode 100644 packages/language-mustache/grammars/mustache.cson create mode 100644 packages/language-mustache/grammars/sql with mustaches.cson create mode 100644 packages/language-mustache/package.json create mode 100644 packages/language-mustache/spec/mustache-spec.coffee create mode 100644 packages/language-objective-c/.coffeelintignore create mode 100644 packages/language-objective-c/.github/no-response.yml create mode 100644 
packages/language-objective-c/.github/workflows/ci.yml create mode 100644 packages/language-objective-c/.gitignore create mode 100644 packages/language-objective-c/CONTRIBUTING.md create mode 100644 packages/language-objective-c/ISSUE_TEMPLATE.md create mode 100644 packages/language-objective-c/LICENSE.md create mode 100644 packages/language-objective-c/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-objective-c/README.md create mode 100644 packages/language-objective-c/coffeelint.json create mode 100644 packages/language-objective-c/grammars/objective-c++.cson create mode 100644 packages/language-objective-c/grammars/objective-c.cson create mode 100644 packages/language-objective-c/grammars/strings file.cson create mode 100644 packages/language-objective-c/package.json create mode 100644 packages/language-objective-c/settings/language-objective-c.cson create mode 100644 packages/language-objective-c/snippets/language-objective-c.cson create mode 100644 packages/language-objective-c/spec/objective-c-spec.coffee create mode 100644 packages/language-perl/.github/no-response.yml create mode 100644 packages/language-perl/.github/workflows/ci.yml create mode 100644 packages/language-perl/.gitignore create mode 100644 packages/language-perl/CONTRIBUTING.md create mode 100644 packages/language-perl/ISSUE_TEMPLATE.md create mode 100644 packages/language-perl/LICENSE.md create mode 100644 packages/language-perl/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-perl/README.md create mode 100644 packages/language-perl/coffeelint.json create mode 100644 packages/language-perl/grammars/perl 6.cson create mode 100644 packages/language-perl/grammars/perl.cson create mode 100644 packages/language-perl/package.json create mode 100644 packages/language-perl/settings/language-perl.cson create mode 100644 packages/language-perl/snippets/language-perl.cson create mode 100644 packages/language-perl/spec/grammar-perl6-spec.coffee create mode 100644 packages/language-perl/spec/grammar-spec.coffee create mode 100644 packages/language-php/.coffeelintignore create mode 100644 packages/language-php/.github/no-response.yml create mode 100644 packages/language-php/.github/workflows/main.yml create mode 100644 packages/language-php/.gitignore create mode 100644 packages/language-php/CONTRIBUTING.md create mode 100644 packages/language-php/ISSUE_TEMPLATE.md create mode 100644 packages/language-php/LICENSE.md create mode 100644 packages/language-php/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-php/README.md create mode 100644 packages/language-php/coffeelint.json create mode 100644 packages/language-php/grammars/html.cson create mode 100644 packages/language-php/grammars/php.cson create mode 100644 packages/language-php/package-lock.json create mode 100644 packages/language-php/package.json create mode 100644 packages/language-php/settings/language-php.cson create mode 100644 packages/language-php/snippets/language-php.cson create mode 100644 packages/language-php/spec/html-spec.coffee create mode 100644 packages/language-php/spec/php-spec.coffee create mode 100644 packages/language-property-list/.coffeelintignore create mode 100644 packages/language-property-list/.github/no-response.yml create mode 100644 packages/language-property-list/.github/workflows/ci.yml create mode 100644 packages/language-property-list/.gitignore create mode 100644 packages/language-property-list/CONTRIBUTING.md create mode 100644 packages/language-property-list/ISSUE_TEMPLATE.md create mode 100644 
packages/language-property-list/LICENSE.md create mode 100644 packages/language-property-list/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-property-list/README.md create mode 100644 packages/language-property-list/coffeelint.json create mode 100644 packages/language-property-list/grammars/property list (old-style).cson create mode 100644 packages/language-property-list/grammars/property list (xml).cson create mode 100644 packages/language-property-list/package.json create mode 100644 packages/language-property-list/settings/language-property-list.cson create mode 100644 packages/language-property-list/snippets/language-property-list.cson create mode 100644 packages/language-python/.coffeelintignore create mode 100644 packages/language-python/.github/no-response.yml create mode 100644 packages/language-python/.github/workflows/ci.yml create mode 100644 packages/language-python/.gitignore create mode 100644 packages/language-python/CONTRIBUTING.md create mode 100644 packages/language-python/ISSUE_TEMPLATE.md create mode 100644 packages/language-python/LICENSE.md create mode 100644 packages/language-python/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-python/README.md create mode 100644 packages/language-python/appveyor.yml create mode 100644 packages/language-python/coffeelint.json create mode 100644 packages/language-python/grammars/python-console.cson create mode 100644 packages/language-python/grammars/python-traceback.cson create mode 100644 packages/language-python/grammars/python.cson create mode 100644 packages/language-python/grammars/regular expressions (python).cson create mode 100644 packages/language-python/grammars/tree-sitter-python.cson create mode 100644 packages/language-python/package-lock.json create mode 100644 packages/language-python/package.json create mode 100644 packages/language-python/settings/language-python.cson create mode 100644 packages/language-python/snippets/language-python.cson create mode 100644 packages/language-python/spec/fixtures/grammar/syntax_test_python.py create mode 100644 packages/language-python/spec/fixtures/grammar/syntax_test_python_functions.py create mode 100644 packages/language-python/spec/fixtures/grammar/syntax_test_python_lambdas.py create mode 100644 packages/language-python/spec/fixtures/grammar/syntax_test_python_typing.py create mode 100644 packages/language-python/spec/language-python-spec.coffee create mode 100644 packages/language-python/spec/python-regex-spec.coffee create mode 100644 packages/language-python/spec/python-spec.coffee create mode 100644 packages/language-ruby-on-rails/.coffeelintignore create mode 100644 packages/language-ruby-on-rails/.github/no-response.yml create mode 100644 packages/language-ruby-on-rails/.github/workflows/ci.yml create mode 100644 packages/language-ruby-on-rails/.gitignore create mode 100644 packages/language-ruby-on-rails/CONTRIBUTING.md create mode 100644 packages/language-ruby-on-rails/ISSUE_TEMPLATE.md create mode 100644 packages/language-ruby-on-rails/LICENSE.md create mode 100644 packages/language-ruby-on-rails/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-ruby-on-rails/README.md create mode 100644 packages/language-ruby-on-rails/coffeelint.json create mode 100644 packages/language-ruby-on-rails/grammars/html (rails).cson create mode 100644 packages/language-ruby-on-rails/grammars/javascript (rails).cson create mode 100644 packages/language-ruby-on-rails/grammars/rjs.cson create mode 100644 packages/language-ruby-on-rails/grammars/ruby 
on rails.cson create mode 100644 packages/language-ruby-on-rails/grammars/sql (rails).cson create mode 100644 packages/language-ruby-on-rails/package.json create mode 100644 packages/language-ruby-on-rails/snippets/language-ruby-on-rails.cson create mode 100644 packages/language-ruby-on-rails/spec/grammar-spec.coffee create mode 100644 packages/language-ruby-on-rails/spec/snippets-spec.coffee create mode 100644 packages/language-ruby/.coffeelintignore create mode 100644 packages/language-ruby/.github/no-response.yml create mode 100644 packages/language-ruby/.github/workflows/ci.yml create mode 100644 packages/language-ruby/.gitignore create mode 100644 packages/language-ruby/CONTRIBUTING.md create mode 100644 packages/language-ruby/ISSUE_TEMPLATE.md create mode 100644 packages/language-ruby/LICENSE.md create mode 100644 packages/language-ruby/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-ruby/README.md create mode 100644 packages/language-ruby/appveyor.yml create mode 100644 packages/language-ruby/coffeelint.json create mode 100644 packages/language-ruby/grammars/gemfile.cson create mode 100644 packages/language-ruby/grammars/html (ruby - erb).cson create mode 100644 packages/language-ruby/grammars/ruby.cson create mode 100644 packages/language-ruby/grammars/tree-sitter-ruby.cson create mode 100644 packages/language-ruby/lib/main.js create mode 100644 packages/language-ruby/package.json create mode 100644 packages/language-ruby/settings/language-ruby.cson create mode 100644 packages/language-ruby/snippets/language-ruby.cson create mode 100644 packages/language-ruby/spec/erb-spec.coffee create mode 100644 packages/language-ruby/spec/gemfile-spec.coffee create mode 100644 packages/language-ruby/spec/ruby-spec.coffee create mode 100644 packages/language-ruby/spec/tree-sitter-spec.js create mode 100644 packages/language-sass/.github/no-response.yml create mode 100644 packages/language-sass/.github/workflows/main.yml create mode 100644 packages/language-sass/.gitignore create mode 100644 packages/language-sass/CONTRIBUTING.md create mode 100644 packages/language-sass/ISSUE_TEMPLATE.md create mode 100644 packages/language-sass/LICENSE.md create mode 100644 packages/language-sass/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-sass/README.md create mode 100644 packages/language-sass/coffeelint.json create mode 100644 packages/language-sass/grammars/sass.cson create mode 100644 packages/language-sass/grammars/sassdoc.cson create mode 100644 packages/language-sass/grammars/scss.cson create mode 100644 packages/language-sass/package-lock.json create mode 100644 packages/language-sass/package.json create mode 100644 packages/language-sass/settings/language-sass.cson create mode 100644 packages/language-sass/snippets/language-sass.cson create mode 100644 packages/language-sass/spec/sass-spec.coffee create mode 100644 packages/language-sass/spec/sassdoc-spec.coffee create mode 100644 packages/language-sass/spec/scss-spec.coffee create mode 100644 packages/language-sass/spec/scss-spec.js create mode 100644 packages/language-shellscript/.github/no-response.yml create mode 100644 packages/language-shellscript/.github/workflows/main.yml create mode 100644 packages/language-shellscript/.gitignore create mode 100644 packages/language-shellscript/CONTRIBUTING.md create mode 100644 packages/language-shellscript/ISSUE_TEMPLATE.md create mode 100644 packages/language-shellscript/LICENSE.md create mode 100644 packages/language-shellscript/PULL_REQUEST_TEMPLATE.md create mode 100644 
packages/language-shellscript/README.md create mode 100644 packages/language-shellscript/coffeelint.json create mode 100644 packages/language-shellscript/grammars/shell-session.cson create mode 100644 packages/language-shellscript/grammars/shell-unix-bash.cson create mode 100644 packages/language-shellscript/grammars/tree-sitter-bash.cson create mode 100644 packages/language-shellscript/package-lock.json create mode 100644 packages/language-shellscript/package.json create mode 100644 packages/language-shellscript/settings/language-shellscript.cson create mode 100644 packages/language-shellscript/snippets/language-shellscript.cson create mode 100644 packages/language-shellscript/spec/shell-session-spec.coffee create mode 100644 packages/language-shellscript/spec/shell-unix-bash-spec.coffee create mode 100644 packages/language-source/.coffeelintignore create mode 100644 packages/language-source/.github/no-response.yml create mode 100644 packages/language-source/.github/workflows/ci.yml create mode 100644 packages/language-source/.gitignore create mode 100644 packages/language-source/CONTRIBUTING.md create mode 100644 packages/language-source/ISSUE_TEMPLATE.md create mode 100644 packages/language-source/LICENSE.md create mode 100644 packages/language-source/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-source/README.md create mode 100644 packages/language-source/coffeelint.json create mode 100644 packages/language-source/package.json create mode 100644 packages/language-source/settings/language-source.cson create mode 100644 packages/language-sql/.coffeelintignore create mode 100644 packages/language-sql/.github/no-response.yml create mode 100644 packages/language-sql/.github/workflows/ci.yml create mode 100644 packages/language-sql/.gitignore create mode 100644 packages/language-sql/CONTRIBUTING.md create mode 100644 packages/language-sql/ISSUE_TEMPLATE.md create mode 100644 packages/language-sql/LICENSE.md create mode 100644 packages/language-sql/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-sql/README.md create mode 100644 packages/language-sql/coffeelint.json create mode 100644 packages/language-sql/grammars/sql.cson create mode 100644 packages/language-sql/package.json create mode 100644 packages/language-sql/settings/language-sql.cson create mode 100644 packages/language-sql/spec/grammar-spec.coffee create mode 100644 packages/language-text/.coffeelintignore create mode 100644 packages/language-text/.github/no-response.yml create mode 100644 packages/language-text/.github/workflows/ci.yml create mode 100644 packages/language-text/.gitignore create mode 100644 packages/language-text/CONTRIBUTING.md create mode 100644 packages/language-text/ISSUE_TEMPLATE.md create mode 100644 packages/language-text/LICENSE.md create mode 100644 packages/language-text/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-text/README.md create mode 100644 packages/language-text/coffeelint.json create mode 100644 packages/language-text/grammars/plain text.cson create mode 100644 packages/language-text/package.json create mode 100644 packages/language-text/snippets/language-text.cson create mode 100644 packages/language-text/spec/plain-text-spec.coffee create mode 100644 packages/language-todo/.github/no-response.yml create mode 100644 packages/language-todo/.github/workflows/ci.yml create mode 100644 packages/language-todo/.gitignore create mode 100644 packages/language-todo/CONTRIBUTING.md create mode 100644 packages/language-todo/ISSUE_TEMPLATE.md create mode 100644 
packages/language-todo/LICENSE.md create mode 100644 packages/language-todo/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-todo/README.md create mode 100644 packages/language-todo/coffeelint.json create mode 100644 packages/language-todo/grammars/todo.cson create mode 100644 packages/language-todo/package.json create mode 100644 packages/language-todo/snippets/todo.cson create mode 100644 packages/language-todo/spec/todo-spec.coffee create mode 100644 packages/language-toml/.coffeelintignore create mode 100644 packages/language-toml/.github/no-response.yml create mode 100644 packages/language-toml/.github/workflows/ci.yml create mode 100644 packages/language-toml/.gitignore create mode 100644 packages/language-toml/CONTRIBUTING.md create mode 100644 packages/language-toml/ISSUE_TEMPLATE.md create mode 100644 packages/language-toml/LICENSE.md create mode 100644 packages/language-toml/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-toml/README.md create mode 100644 packages/language-toml/coffeelint.json create mode 100644 packages/language-toml/grammars/toml.cson create mode 100644 packages/language-toml/package-lock.json create mode 100644 packages/language-toml/package.json create mode 100644 packages/language-toml/settings/language-toml.cson create mode 100644 packages/language-toml/spec/toml-spec.coffee create mode 100644 packages/language-typescript/.github/workflows/ci.yml create mode 100644 packages/language-typescript/.gitignore create mode 100644 packages/language-typescript/CONTRIBUTING.md create mode 100644 packages/language-typescript/ISSUE_TEMPLATE.md create mode 100644 packages/language-typescript/LICENSE.md create mode 100644 packages/language-typescript/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-typescript/README.md create mode 100644 packages/language-typescript/grammars/TypeScript.json create mode 100644 packages/language-typescript/grammars/TypeScriptReact.json create mode 100644 packages/language-typescript/grammars/tree-sitter-flow.cson create mode 100644 packages/language-typescript/grammars/tree-sitter-tsx.cson create mode 100644 packages/language-typescript/grammars/tree-sitter-typescript.cson create mode 100644 packages/language-typescript/lib/main.js create mode 100644 packages/language-typescript/package-lock.json create mode 100644 packages/language-typescript/package.json create mode 100644 packages/language-typescript/settings/TypeScript.cson create mode 100644 packages/language-typescript/settings/TypeScriptReact.cson create mode 100644 packages/language-typescript/snippets/language-typescript.cson create mode 100644 packages/language-xml/.coffeelintignore create mode 100644 packages/language-xml/.github/no-response.yml create mode 100644 packages/language-xml/.github/workflows/ci.yml create mode 100644 packages/language-xml/.gitignore create mode 100644 packages/language-xml/CONTRIBUTING.md create mode 100644 packages/language-xml/ISSUE_TEMPLATE.md create mode 100644 packages/language-xml/LICENSE.md create mode 100644 packages/language-xml/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-xml/README.md create mode 100644 packages/language-xml/coffeelint.json create mode 100644 packages/language-xml/grammars/xml.cson create mode 100644 packages/language-xml/grammars/xsl.cson create mode 100644 packages/language-xml/package.json create mode 100644 packages/language-xml/settings/language-xml.cson create mode 100644 packages/language-xml/snippets/language-xml.cson create mode 100644 
packages/language-xml/spec/xml-spec.coffee create mode 100644 packages/language-yaml/.coffeelintignore create mode 100644 packages/language-yaml/.github/no-response.yml create mode 100644 packages/language-yaml/.github/workflows/ci.yml create mode 100644 packages/language-yaml/.gitignore create mode 100644 packages/language-yaml/CONTRIBUTING.md create mode 100644 packages/language-yaml/ISSUE_TEMPLATE.md create mode 100644 packages/language-yaml/LICENSE.md create mode 100644 packages/language-yaml/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/language-yaml/README.md create mode 100644 packages/language-yaml/coffeelint.json create mode 100644 packages/language-yaml/grammars/yaml.cson create mode 100644 packages/language-yaml/package.json create mode 100644 packages/language-yaml/settings/language-yaml.cson create mode 100644 packages/language-yaml/spec/fixtures/cloud.config create mode 100644 packages/language-yaml/spec/yaml-spec.coffee diff --git a/apm/package-lock.json b/apm/package-lock.json index 6cbaff56f..4972291cb 100644 --- a/apm/package-lock.json +++ b/apm/package-lock.json @@ -64,60 +64,12 @@ "ansi-regex": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" - }, - "aproba": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", - "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" - }, - "are-we-there-yet": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.7.tgz", - "integrity": "sha512-nxwy40TuMiUGqMyRHgCSWZ9FM4VAoRP4xUYSTv5ImRog+h9yISPbVH7H8fASCIzYn9wlEv4zvFL7uKDMCFQm3g==", - "requires": { - "delegates": "^1.0.0", - "readable-stream": "^2.0.6" - }, - "dependencies": { - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" - }, - "readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "requires": { - "safe-buffer": "~5.1.0" - } - } - } + "integrity": "sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==" }, "asar": { "version": "0.12.1", "resolved": "https://registry.npmjs.org/asar/-/asar-0.12.1.tgz", - "integrity": "sha1-35Q+jrXNdPvKBmPi10uPK3J7UI8=", + "integrity": "sha512-lmgqdFF9XvKVQcrJ0mv2XxiStH1L0Tmh9G4eY2/XiNN3X25BrkFR/QuOK9ZZlhEWoqd9LiYtD/fe7+gvPcVvWA==", "requires": { "chromium-pickle-js": "^0.1.0", "commander": "^2.9.0", @@ -132,7 +84,7 @@ "glob": { "version": "6.0.4", "resolved": 
"https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", - "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", + "integrity": "sha512-MKZeRNyYZAVVVG1oZeLaWie1uweH40m9AZwIwxyPbTSX4hHrVYSzLg0Ro5Z5R7XKkIX+Cc6oD1rqeDJnwsB8/A==", "requires": { "inflight": "^1.0.4", "inherits": "2", @@ -146,15 +98,15 @@ "asar-require": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/asar-require/-/asar-require-0.3.0.tgz", - "integrity": "sha1-R+TLRBSJSthplTbNDFjAySFRtFs=", + "integrity": "sha512-KqVbqfVhY+88TlLLh8wvZozZ5LI5sTCClS3Ik15XSgNGCLyW3u/qxnudh3Olj569fhixIcbbpQQoYKfthjQC0Q==", "requires": { "asar": "0.12.1" } }, "asn1": { - "version": "0.2.4", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", - "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", + "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", "requires": { "safer-buffer": "~2.1.0" } @@ -162,22 +114,22 @@ "assert-plus": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==" }, "async": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/async/-/async-3.2.1.tgz", - "integrity": "sha512-XdD5lRO/87udXCMC9meWdYiR+Nq6ZjUfXidViUZGu2F1MO4T3XwZ1et0hb2++BgLfhyJwy44BGB/yx80ABx8hg==" + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.4.tgz", + "integrity": "sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==" }, "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "aws-sign2": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" + "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==" }, "aws4": { "version": "1.11.0", @@ -197,7 +149,7 @@ "bcrypt-pbkdf": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", - "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", "requires": { "tweetnacl": "^0.14.3" } @@ -205,7 +157,7 @@ "binary": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", - "integrity": "sha1-n2BVO8XOjDOG87VTz/R0Yq3sqnk=", + "integrity": "sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg==", "requires": { "buffers": "~0.1.1", "chainsaw": "~0.1.0" @@ -262,22 +214,22 @@ "buffers": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/buffers/-/buffers-0.1.1.tgz", - "integrity": "sha1-skV5w77U1tOWru5tmorn9Ugqt7s=" + "integrity": "sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ==" }, "camelcase": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-2.1.1.tgz", - "integrity": "sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8=" + "integrity": 
"sha512-DLIsRzJVBQu72meAKPkWQOLcujdXT32hwdfnkI1frSiSRMK1MofjKHf+MEx0SB6fjEFXL8fBDv1dKymBlOp4Qw==" }, "caseless": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" + "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==" }, "chainsaw": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz", - "integrity": "sha1-XqtQsor+WAdNDVgpE4iCi15fvJg=", + "integrity": "sha512-75kWfWt6MEKNC8xYXIdRpDehRYY/tNSgwKaJq+dbbDcxORuVrrQ+SEHoWsniVn9XPYfP4gmdWIeDk/4YNp1rNQ==", "requires": { "traverse": ">=0.3.0 <0.4" } @@ -290,12 +242,12 @@ "chromium-pickle-js": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/chromium-pickle-js/-/chromium-pickle-js-0.1.0.tgz", - "integrity": "sha1-HUixB9ghJqLz4hHC6iX4A7pVGyE=" + "integrity": "sha512-0Xkh0X11DQcRnvr9cO7PKX+MPS6CWgLhAaWznlgMaerE+ZmeV8o9hE6o+wlAxEMAVZpaYSUg4zKx1SDHN7gNNQ==" }, "cliui": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/cliui/-/cliui-3.2.0.tgz", - "integrity": "sha1-EgYBU3qRbSmUD5NNo7SNWFo5IT0=", + "integrity": "sha512-0yayqDxWQbqk3ojkYqUKqaAQ6AfNKeKWRNA8kR0WXzAsdHpP4BIaOmMAG87JGuO6qcobyW4GjxHd9PmhEd+T9w==", "requires": { "string-width": "^1.0.1", "strip-ansi": "^3.0.1", @@ -305,7 +257,7 @@ "code-point-at": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", - "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" + "integrity": "sha512-RpAVKQA5T63xEj6/giIbUEtZwJ4UFIc3ZtvEkiaUERylqe8xb5IvqcgOurZLahv93CLKfxcw5YI+DZcUBRyLXA==" }, "coffee-script": { "version": "1.12.7", @@ -333,22 +285,17 @@ "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" - }, - "console-control-strings": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", - "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=" + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, "core-util-is": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" }, "cson-parser": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/cson-parser/-/cson-parser-1.3.5.tgz", - "integrity": "sha1-fsZ14DkUVTO/KmqFYHPxWZ2cLSQ=", + "integrity": "sha512-Pchz4dDkyafUL4V3xBuP9Os8Hu9VU96R+MxuTKh7NR+D866UiWrhBiSLbfuvwApEaJzpXhXTr3iPe4lFtXLzcQ==", "requires": { "coffee-script": "^1.10.0" } @@ -356,12 +303,12 @@ "cuint": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/cuint/-/cuint-0.2.2.tgz", - "integrity": "sha1-QICG1AlVDCYxFVYZ6fp7ytw7mRs=" + "integrity": "sha512-d4ZVpCW31eWwCMe1YT3ur7mUDnTXbgwyzaL320DrcRT45rfjYxkt5QWLrmOJ+/UEAI2+fQgKe/fCjR8l4TpRgw==" }, "d": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/d/-/d-0.1.1.tgz", - "integrity": "sha1-2hhMU10Y2O57oqoim5FACfrhEwk=", + "integrity": "sha512-0SdM9V9pd/OXJHoWmTfNPTAeD+lw6ZqHg+isPyBFuJsZLSE0Ygg1cYZ/0l6DrKQXMOqGOu1oWupMoOfoRfMZrQ==", "requires": { "es5-ext": "~0.10.2" } @@ -369,7 +316,7 @@ "dashdash": { "version": "1.14.1", 
"resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", "requires": { "assert-plus": "^1.0.0" } @@ -377,14 +324,14 @@ "decamelize": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=" + "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==" }, "decompress-response": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-4.2.1.tgz", - "integrity": "sha512-jOSne2qbyE+/r8G1VU+G/82LBs2Fs4LAsTiLSHOCOMZQl2OKZ6i8i4IyHemTe+/yIXOtTcRQMzPcgyhoFlqPkw==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", "requires": { - "mimic-response": "^2.0.0" + "mimic-response": "^3.1.0" } }, "decompress-zip": { @@ -409,22 +356,17 @@ "delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" - }, - "delegates": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", - "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=" + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" }, "detect-libc": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", - "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz", + "integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==" }, "ecc-jsbn": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", - "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", + "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==", "requires": { "jsbn": "~0.1.0", "safer-buffer": "^2.1.0" @@ -433,7 +375,7 @@ "emissary": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/emissary/-/emissary-1.3.3.tgz", - "integrity": "sha1-phjZLWgrIy0xER3DYlpd9mF5lgY=", + "integrity": "sha512-pD6FWNBSlEOzSJDCTcSGVLgNnGw5fnCvvGMdQ/TN43efeXZ/QTq8+hZoK3OOEXPRNjMmSJmeOnEJh+bWT5O8rQ==", "requires": { "es6-weak-map": "^0.1.2", "mixto": "1.x", @@ -450,13 +392,13 @@ } }, "es5-ext": { - "version": "0.10.53", - "resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.53.tgz", - "integrity": "sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q==", + "version": "0.10.61", + "resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.61.tgz", + "integrity": "sha512-yFhIqQAzu2Ca2I4SE2Au3rxVfmohU9Y7wqGR+s7+H7krk26NXhIRAZDgqd6xqjCEFUomDEA3/Bo/7fKmIkW1kA==", "requires": { - "es6-iterator": "~2.0.3", - "es6-symbol": "~3.1.3", - "next-tick": "~1.0.0" + "es6-iterator": "^2.0.3", + "es6-symbol": "^3.1.3", + "next-tick": "^1.1.0" }, "dependencies": { "d": { @@ -471,7 +413,7 @@ "es6-iterator": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-2.0.3.tgz", - "integrity": 
"sha1-p96IkUGgWpSwhUQDstCg+/qY87c=", + "integrity": "sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g==", "requires": { "d": "1", "es5-ext": "^0.10.35", @@ -492,7 +434,7 @@ "es6-iterator": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-0.1.3.tgz", - "integrity": "sha1-1vWLjE/EE8JJtLqhl2j45NfIlE4=", + "integrity": "sha512-6TOmbFM6OPWkTe+bQ3ZuUkvqcWUjAnYjKUCLdbvRsAUz2Pr+fYIibwNXNkLNtIK9PPFbNMZZddaRNkyJhlGJhA==", "requires": { "d": "~0.1.1", "es5-ext": "~0.10.5", @@ -502,7 +444,7 @@ "es6-symbol": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-2.0.1.tgz", - "integrity": "sha1-dhtcZ8/U8dGK+yNPaR1nhoLLO/M=", + "integrity": "sha512-wjobO4zO8726HVU7mI2OA/B6QszqwHJuKab7gKHVx+uRfVVYGcWJkCIFxV2Madqb9/RUSrhJ/r6hPfG7FsWtow==", "requires": { "d": "~0.1.1", "es5-ext": "~0.10.5" @@ -511,7 +453,7 @@ "es6-weak-map": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/es6-weak-map/-/es6-weak-map-0.1.4.tgz", - "integrity": "sha1-cGzvnpmqI2undmwjnIueKG6n0ig=", + "integrity": "sha512-P+N5Cd2TXeb7G59euFiM7snORspgbInS29Nbf3KNO2JQp/DyhvMCDWd58nsVAXwYJ6W3Bx7qDdy6QQ3PCJ7jKQ==", "requires": { "d": "~0.1.1", "es5-ext": "~0.10.6", @@ -530,17 +472,17 @@ "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==" }, "ext": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/ext/-/ext-1.4.0.tgz", - "integrity": "sha512-Key5NIsUxdqKg3vIsdw9dSuXpPCQ297y6wBjL30edxwPgt2E44WcWBZey/ZvUc6sERLTxKdyCu4gZFmUbk1Q7A==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/ext/-/ext-1.6.0.tgz", + "integrity": "sha512-sdBImtzkq2HpkdRLtlLWDa6w4DX22ijZLKx8BMPUuKe1c5lbN6xwQDQCxSfxBQnHZ13ls/FH0MQZx/q/gr6FQg==", "requires": { - "type": "^2.0.0" + "type": "^2.5.0" }, "dependencies": { "type": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/type/-/type-2.5.0.tgz", - "integrity": "sha512-180WMDQaIMm3+7hGXWf12GtdniDEy7nYcyFMKJn/eZz/6tSLXrUN9V0wKSbMjej0I1WHWbpREDEKHtqPQa9NNw==" + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/type/-/type-2.6.0.tgz", + "integrity": "sha512-eiDBDOmkih5pMbo9OqsqPRGMljLodLcwd5XD5JbtNB0o89xZAwynY9EdCDsJU7LtcVCClu9DvM7/0Ep1hYX3EQ==" } } }, @@ -552,7 +494,7 @@ "extsprintf": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" + "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==" }, "fast-deep-equal": { "version": "3.1.3", @@ -581,7 +523,7 @@ "forever-agent": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" + "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==" }, "form-data": { "version": "2.3.3", @@ -601,7 +543,7 @@ "fs-extra": { "version": "0.26.7", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-0.26.7.tgz", - "integrity": "sha1-muH92UiXeY7at20JGM9C0MMYT6k=", + "integrity": "sha512-waKu+1KumRhYv8D8gMRCKJGAMI9pRnPuEb1mvgYD0f7wBscg+h6bW4FDTmEZhB9VKxvoTtxW+Y7bnIlB7zja6Q==", "requires": { "graceful-fs": "^4.1.2", "jsonfile": "^2.1.0", @@ -642,7 +584,7 @@ "async": { "version": "1.5.2", "resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz", - "integrity": "sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo=" + "integrity": 
"sha512-nSVgobk4rv61R9PUSDtYt7mPVB2olxNR5RWJcAsH676/ef11bUZwvu7+RGYrYauVdDPcO519v68wRhXQtxsV9w==" }, "rimraf": { "version": "2.7.1", @@ -657,27 +599,12 @@ "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" - }, - "gauge": { - "version": "2.7.4", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", - "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", - "requires": { - "aproba": "^1.0.3", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.0", - "object-assign": "^4.1.0", - "signal-exit": "^3.0.0", - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1", - "wide-align": "^1.1.0" - } + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" }, "getpass": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", "requires": { "assert-plus": "^1.0.0" } @@ -694,25 +621,25 @@ "github-from-package": { "version": "0.0.0", "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", - "integrity": "sha1-l/tdlr/eiXMxPyDoKI75oWf6ZM4=" + "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==" }, "glob": { - "version": "7.1.7", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", - "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", "requires": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", - "minimatch": "^3.0.4", + "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" } }, "graceful-fs": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz", - "integrity": "sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg==" + "version": "4.2.10", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==" }, "grim": { "version": "2.0.3", @@ -725,7 +652,7 @@ "har-schema": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" + "integrity": "sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==" }, "har-validator": { "version": "5.1.5", @@ -736,11 +663,6 @@ "har-schema": "^2.0.0" } }, - "has-unicode": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", - "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=" - }, "hosted-git-info": { "version": "3.0.8", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-3.0.8.tgz", @@ -752,7 +674,7 @@ "http-signature": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==", 
"requires": { "assert-plus": "^1.0.0", "jsprim": "^1.2.2", @@ -767,7 +689,7 @@ "inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", "requires": { "once": "^1.3.0", "wrappy": "1" @@ -786,7 +708,7 @@ "invert-kv": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-1.0.0.tgz", - "integrity": "sha1-EEqOSqym09jNFXqO+L+rLXo//bY=" + "integrity": "sha512-xgs2NH9AE66ucSq4cNG1nhSFghr5l6tdL15Pk+jl46bmmBapgoaY/AacXyaDznAqmGL99TiLSQgO/XazFSKYeQ==" }, "is-docker": { "version": "2.2.1", @@ -796,7 +718,7 @@ "is-fullwidth-code-point": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "integrity": "sha512-1pqUqRjkhPJ9miNq9SwMfdvi6lBJcd6eFxvfaivQhaH3SgisfiuudvFntdKOmxuee/77l+FPjKrQjWvmPjWrRw==", "requires": { "number-is-nan": "^1.0.0" } @@ -804,7 +726,7 @@ "is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" + "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==" }, "is-wsl": { "version": "2.2.0", @@ -817,22 +739,22 @@ "isarray": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==" }, "isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" + "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==" }, "jsbn": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" + "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==" }, "json-schema": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==" }, "json-schema-traverse": { "version": "0.4.1", @@ -842,40 +764,40 @@ "json-stringify-safe": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==" }, "jsonfile": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", - "integrity": "sha1-NzaitCi4e72gzIO1P6PWM6NcKug=", + "integrity": "sha512-PKllAqbgLgxHaj8TElYymKCAgrASebJrWpTnEkOaTowt23VKXXN0sUeriJ+eh7y6ufb/CC5ap11pz71/cM0hUw==", "requires": { "graceful-fs": "^4.1.6" } }, "jsprim": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", - "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "version": "1.4.2", + "resolved": 
"https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz", + "integrity": "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==", "requires": { "assert-plus": "1.0.0", "extsprintf": "1.3.0", - "json-schema": "0.2.3", + "json-schema": "0.4.0", "verror": "1.10.0" } }, "keytar": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/keytar/-/keytar-7.7.0.tgz", - "integrity": "sha512-YEY9HWqThQc5q5xbXbRwsZTh2PJ36OSYRjSv3NN2xf5s5dpLTjEZnC2YikR29OaVybf9nQ0dJ/80i40RS97t/A==", + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/keytar/-/keytar-7.9.0.tgz", + "integrity": "sha512-VPD8mtVtm5JNtA2AErl6Chp06JBfy7diFQ7TQQhdpWOl6MrCRB+eRbvAZUsbGQS9kiMq0coJsy0W0vHpDCkWsQ==", "requires": { - "node-addon-api": "^3.0.0", - "prebuild-install": "^6.0.0" + "node-addon-api": "^4.3.0", + "prebuild-install": "^7.0.1" } }, "klaw": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/klaw/-/klaw-1.3.1.tgz", - "integrity": "sha1-QIhDO0azsbolnXh4XY6W9zugJDk=", + "integrity": "sha512-TED5xi9gGQjGpNnvRWknrwAB1eL5GciPfVFOt3Vk1OJCVDQbzuSfrF3hkUQKlsgKrG1F+0t5W0m+Fje1jIt8rw==", "requires": { "graceful-fs": "^4.1.9" } @@ -883,7 +805,7 @@ "lcid": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/lcid/-/lcid-1.0.0.tgz", - "integrity": "sha1-MIrMr6C8SDo4Z7S28rlQYlHRuDU=", + "integrity": "sha512-YiGkH6EnGrDGqLMITnGjXtGmNtjoXw9SVUzcaos8RBi7Ps0VBylkq+vOcY9QE5poLasPCR849ucFUkl0UzUyOw==", "requires": { "invert-kv": "^1.0.0" } @@ -897,40 +819,40 @@ } }, "mime-db": { - "version": "1.49.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.49.0.tgz", - "integrity": "sha512-CIc8j9URtOVApSFCQIF+VBkX1RwXp/oMMOrqdyXSBXq5RWNEsRfyj1kiRnQgmNXmHxPoFIxOroKA3zcU9P+nAA==" + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" }, "mime-types": { - "version": "2.1.32", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.32.tgz", - "integrity": "sha512-hJGaVS4G4c9TSMYh2n6SQAGrC4RnfU+daP8G7cSCmaqNjiOoUY0VHCMS42pxnQmVF1GWwFhbHWn3RIxCqTmZ9A==", + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", "requires": { - "mime-db": "1.49.0" + "mime-db": "1.52.0" } }, "mimic-response": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-2.1.0.tgz", - "integrity": "sha512-wXqjST+SLt7R009ySCglWBCFpjUygmCIfD790/kVbiGmUgfYGuB14PiTd5DwVxSV4NcYHjzMkoj5LjQZwTQLEA==" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==" }, "minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "requires": { "brace-expansion": "^1.1.7" } }, "minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": 
"sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" }, "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.3.tgz", + "integrity": "sha512-N0BOsdFAlNRfmwMhjAsLVWOk7Ljmeb39iqFlsV1At+jqRhSUP9yeof8FyJu4imaJiSUp8vQebWD/guZwGQC8iA==", "requires": { "yallist": "^4.0.0" } @@ -947,14 +869,14 @@ "mixto": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/mixto/-/mixto-1.0.0.tgz", - "integrity": "sha1-wyDvYbUvKJj1IuF9i7xtUG2EJbY=" + "integrity": "sha512-g2Kg8O3ww9RbWuPnAgTsAhe+aBwVXoo/lhYyDKTYPiLKdJofAr97O8zTFzW5UfiJUoeJbmXLmcjDAF7/Egwi8Q==" }, "mkdirp": { - "version": "0.5.5", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", - "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", "requires": { - "minimist": "^1.2.5" + "minimist": "^1.2.6" } }, "mkdirp-classic": { @@ -965,7 +887,7 @@ "mkpath": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/mkpath/-/mkpath-0.1.0.tgz", - "integrity": "sha1-dVSm+Nhxg0zJe1RisSLEwSTW3pE=" + "integrity": "sha512-bauHShmaxVQiEvlrAPWxSPn8spSL8gDVRl11r8vLT4r/KdnknLqtqwQbToZ2Oa8sJkExYY1z6/d+X7pNiqo4yg==" }, "mksnapshot": { "version": "0.3.5", @@ -985,7 +907,7 @@ "mv": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", - "integrity": "sha1-rmzg1vbV4KT32JN5jQPB6pVZtqI=", + "integrity": "sha512-at/ZndSy3xEGJ8i0ygALh8ru9qy7gWW1cmkaqBN29JmMlIvM//MEO9y1sk/avxuwnPcfhkejkLsuPxH81BrkSg==", "requires": { "mkdirp": "~0.5.1", "ncp": "~2.0.0", @@ -995,7 +917,7 @@ "glob": { "version": "6.0.4", "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", - "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", + "integrity": "sha512-MKZeRNyYZAVVVG1oZeLaWie1uweH40m9AZwIwxyPbTSX4hHrVYSzLg0Ro5Z5R7XKkIX+Cc6oD1rqeDJnwsB8/A==", "requires": { "inflight": "^1.0.4", "inherits": "2", @@ -1007,7 +929,7 @@ "rimraf": { "version": "2.4.5", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", - "integrity": "sha1-7nEM5dk6j9uFb7Xqj/Di11k0sto=", + "integrity": "sha512-J5xnxTyqaiw06JjMftq7L9ouA448dw/E7dKghkP9WpKNuwmARNNg+Gk8/u5ryb9N/Yo2+z3MCwuqFK/+qPOPfQ==", "requires": { "glob": "^6.0.1" } @@ -1015,9 +937,9 @@ } }, "nan": { - "version": "2.15.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.15.0.tgz", - "integrity": "sha512-8ZtvEnA2c5aYCZYd1cvgdnU6cqwixRoYg70xPLWUws5ORTa/lnw+u4amixRS/Ac5U5mQVgp9pnlSUnbNWFaWZQ==" + "version": "2.16.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.16.0.tgz", + "integrity": "sha512-UdAqHyFngu7TfQKsCBgAA6pWDkT8MAO7d0jyOecVhN5354xbLqdn8mV9Tat9gepAupm0bt2DbeaSC8vS52MuFA==" }, "napi-build-utils": { "version": "1.0.2", @@ -1027,45 +949,38 @@ "ncp": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", - "integrity": "sha1-GVoh1sRuNh0vsSgbo4uR6d9727M=" + "integrity": 
"sha512-zIdGUrPRFTUELUvr3Gmc7KZ2Sw/h1PiVM0Af/oHB6zgnV1ikqSfRk+TOufi79aHYCW3NiOXmr1BP5nWbzojLaA==" }, "next-tick": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.0.0.tgz", - "integrity": "sha1-yobR/ogoFpsBICCOPchCS524NCw=" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.1.0.tgz", + "integrity": "sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==" }, "node-abi": { - "version": "2.30.0", - "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-2.30.0.tgz", - "integrity": "sha512-g6bZh3YCKQRdwuO/tSZZYJAw622SjsRfJ2X0Iy4sSOHZ34/sPPdVBn8fev2tj7njzLwuqPw9uMtGsGkO5kIQvg==", + "version": "3.22.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.22.0.tgz", + "integrity": "sha512-u4uAs/4Zzmp/jjsD9cyFYDXeISfUWaAVWshPmDZOFOv4Xl4SbzTXm53I04C2uRueYJ+0t5PEtLH/owbn2Npf/w==", "requires": { - "semver": "^5.4.1" - }, - "dependencies": { - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - } + "semver": "^7.3.5" } }, "node-addon-api": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-3.2.1.tgz", - "integrity": "sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==" + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-4.3.0.tgz", + "integrity": "sha512-73sE9+3UaLYYFmDsFZnqCInzPyh3MqIwZO9cw58yIqAZhONrrabrYyYe3TuIqtIiOuTXVhsGau8hcrhhwSsDIQ==" }, "nopt": { "version": "3.0.6", "resolved": "https://registry.npmjs.org/nopt/-/nopt-3.0.6.tgz", - "integrity": "sha1-xkZdvwirzU2zWTF/eaxopkayj/k=", + "integrity": "sha512-4GUt3kSEYmk4ITxzB/b9vaIDfUVWN/Ml1Fwl11IlnIG2iaJ9O6WXZ9SrYM9NLI8OCBieN2Y8SWC2oJV0RQ7qYg==", "requires": { "abbrev": "1" } }, "npm": { - "version": "6.14.14", - "resolved": "https://registry.npmjs.org/npm/-/npm-6.14.14.tgz", - "integrity": "sha512-4TnYPV3rvwk9m92ON0iFrdXtRyMhrNkGdI8sr3dy1HVE3pVNxR9FyWYbjAw/HjfmHvmHUbxTVbmjAMv8NhIa6Q==", + "version": "6.14.17", + "resolved": "https://registry.npmjs.org/npm/-/npm-6.14.17.tgz", + "integrity": "sha512-CxEDn1ydVRPDl4tHrlnq+WevYAhv4GF2AEHzJKQ4prZDZ96IS3Uo6t0Sy6O9kB6XzqkI+J00WfYCqqk0p6IJ1Q==", "requires": { "JSONStream": "^1.3.5", "abbrev": "~1.1.1", @@ -1176,7 +1091,7 @@ "sorted-union-stream": "~2.1.3", "ssri": "^6.0.2", "stringify-package": "^1.0.1", - "tar": "^4.4.15", + "tar": "^4.4.19", "text-table": "~0.2.0", "tiny-relative-date": "^1.3.0", "uid-number": "0.0.6", @@ -1466,7 +1381,7 @@ }, "dependencies": { "ansi-regex": { - "version": "4.1.0", + "version": "4.1.1", "bundled": true }, "is-fullwidth-code-point": { @@ -2439,7 +2354,7 @@ "bundled": true }, "json-schema": { - "version": "0.2.3", + "version": "0.4.0", "bundled": true }, "json-stringify-safe": { @@ -2451,12 +2366,12 @@ "bundled": true }, "jsprim": { - "version": "1.4.1", + "version": "1.4.2", "bundled": true, "requires": { "assert-plus": "1.0.0", "extsprintf": "1.3.0", - "json-schema": "0.2.3", + "json-schema": "0.4.0", "verror": "1.10.0" } }, @@ -2772,7 +2687,7 @@ } }, "minimist": { - "version": "1.2.5", + "version": "1.2.6", "bundled": true }, "minizlib": { @@ -2816,7 +2731,7 @@ }, "dependencies": { "minimist": { - "version": "1.2.5", + "version": "1.2.6", "bundled": true } } @@ -3184,7 +3099,7 @@ "bundled": true }, "path-parse": { - "version": "1.0.6", + 
"version": "1.0.7", "bundled": true }, "performance-now": { @@ -3735,16 +3650,16 @@ } }, "tar": { - "version": "4.4.15", + "version": "4.4.19", "bundled": true, "requires": { - "chownr": "^1.1.1", - "fs-minipass": "^1.2.5", - "minipass": "^2.8.6", - "minizlib": "^1.2.1", - "mkdirp": "^0.5.0", - "safe-buffer": "^5.1.2", - "yallist": "^3.0.3" + "chownr": "^1.1.4", + "fs-minipass": "^1.2.7", + "minipass": "^2.9.0", + "minizlib": "^1.3.3", + "mkdirp": "^0.5.5", + "safe-buffer": "^5.2.1", + "yallist": "^3.1.1" }, "dependencies": { "minipass": { @@ -3754,6 +3669,14 @@ "safe-buffer": "^5.1.2", "yallist": "^3.0.0" } + }, + "safe-buffer": { + "version": "5.2.1", + "bundled": true + }, + "yallist": { + "version": "3.1.1", + "bundled": true } } }, @@ -4010,7 +3933,7 @@ }, "dependencies": { "ansi-regex": { - "version": "4.1.0", + "version": "4.1.1", "bundled": true }, "is-fullwidth-code-point": { @@ -4156,36 +4079,20 @@ } } }, - "npmlog": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", - "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", - "requires": { - "are-we-there-yet": "~1.1.2", - "console-control-strings": "~1.1.0", - "gauge": "~2.7.3", - "set-blocking": "~2.0.0" - } - }, "number-is-nan": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", - "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=" + "integrity": "sha512-4jbtZXNAsfZbAHiiqjLPBiCl16dES1zI4Hpzzxw61Tk+loF+sBDBKx1ICKKKwIqQ7M0mFn1TmkN7euSncWgHiQ==" }, "oauth-sign": { "version": "0.9.0", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" }, - "object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" - }, "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", "requires": { "wrappy": "1" } @@ -4210,7 +4117,7 @@ "os-locale": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-1.4.0.tgz", - "integrity": "sha1-IPnxeuKe00XoveWDsT0gCYA8FNk=", + "integrity": "sha512-PRT7ZORmwu2MEFt4/fv3Q+mEfN4zetKxufQrkShY2oGvUms9r8otu5HfdyIFHkYXjO7laNsoVGmM2MANfuTA8g==", "requires": { "lcid": "^1.0.0" } @@ -4218,47 +4125,41 @@ "os-tmpdir": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", - "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=" + "integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==" }, "path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==" }, "performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" + "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==" }, "prebuild-install": { - "version": "6.1.4", - 
"resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-6.1.4.tgz", - "integrity": "sha512-Z4vpywnK1lBg+zdPCVCsKq0xO66eEV9rWo2zrROGGiRS4JtueBOdlB1FnY8lcy7JsUud/Q3ijUxyWN26Ika0vQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.1.tgz", + "integrity": "sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw==", "requires": { - "detect-libc": "^1.0.3", + "detect-libc": "^2.0.0", "expand-template": "^2.0.3", "github-from-package": "0.0.0", "minimist": "^1.2.3", "mkdirp-classic": "^0.5.3", "napi-build-utils": "^1.0.1", - "node-abi": "^2.21.0", - "npmlog": "^4.0.1", + "node-abi": "^3.3.0", "pump": "^3.0.0", "rc": "^1.2.7", - "simple-get": "^3.0.3", + "simple-get": "^4.0.0", "tar-fs": "^2.0.0", "tunnel-agent": "^0.6.0" } }, - "process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" - }, "property-accessors": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/property-accessors/-/property-accessors-1.1.3.tgz", - "integrity": "sha1-Hd6EAkYxhlkJ7zBwM2VoDF+SixU=", + "integrity": "sha512-WQTVW7rn+k6wq8FyYVM15afyoB2loEdeIzd/o7+HEA5hMZcxvRf4Khie0fBM9wLP3EJotKhiH15kY7Dd4gc57g==", "requires": { "es6-weak-map": "^0.1.2", "mixto": "1.x" @@ -4286,12 +4187,12 @@ "q": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/q/-/q-1.5.1.tgz", - "integrity": "sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc=" + "integrity": "sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw==" }, "qs": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" + "version": "6.5.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", + "integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==" }, "rc": { "version": "1.2.8", @@ -4307,7 +4208,7 @@ "read": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", - "integrity": "sha1-s9oZvQUkMal2cdRKQmNK33ELQMQ=", + "integrity": "sha512-rSOKNYUmaxy0om1BNjMN4ezNT6VKK+2xF4GBhc81mkH7L60i6dp8qPYrkndNLT3QPphoII3maL9PVC9XmhHwVQ==", "requires": { "mute-stream": "~0.0.4" } @@ -4315,7 +4216,7 @@ "readable-stream": { "version": "1.1.14", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", - "integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=", + "integrity": "sha512-+MeVjFf4L44XUkhM1eYbD8fyEsxcV81pqMSR5gblfcLCHfZvbrqy4/qYHE+/R5HoBUT11WV5O08Cr1n3YXkWVQ==", "requires": { "core-util-is": "~1.0.0", "inherits": "~2.0.1", @@ -4371,7 +4272,7 @@ "season": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/season/-/season-6.0.2.tgz", - "integrity": "sha1-naWPsd3SSCTXYhstxjpxI7UCF7Y=", + "integrity": "sha512-5eq1ZKvsIUTkefE/R6PhJyiDDaalPjmdhUPVMuOFh4Yz2n5pBl1COkzNlxQyI8BXEBEIu1nJeJqJPVD0c3vycQ==", "requires": { "cson-parser": "^1.3.0", "fs-plus": "^3.0.0", @@ -4379,42 +4280,32 @@ } }, "semver": { - "version": "7.3.5", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", - "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "version": "7.3.7", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", + "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", "requires": { "lru-cache": "^6.0.0" } }, - "set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" - }, - "signal-exit": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", - "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==" - }, "simple-concat": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==" }, "simple-get": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-3.1.0.tgz", - "integrity": "sha512-bCR6cP+aTdScaQCnQKbPKtJOKDp/hj9EDLJo3Nw4y1QksqaovlW/bnptB6/c1e+qmNIDHRK+oXFDdEqBT8WzUA==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz", + "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==", "requires": { - "decompress-response": "^4.2.0", + "decompress-response": "^6.0.0", "once": "^1.3.1", "simple-concat": "^1.0.0" } }, "sshpk": { - "version": "1.16.1", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", - "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz", + "integrity": "sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ==", "requires": { "asn1": "~0.2.3", "assert-plus": "^1.0.0", @@ -4430,7 +4321,7 @@ "string-width": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "integrity": "sha512-0XsVpQLnVCXHJfyEs8tC0zpTVIr5PKKsQtkT29IwupnPTjtPmQ3xT/4yCREF9hYkV/3M3kzcUTSAZT6a6h81tw==", "requires": { "code-point-at": "^1.0.0", "is-fullwidth-code-point": "^1.0.0", @@ -4440,12 +4331,12 @@ "string_decoder": { "version": "0.10.31", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + "integrity": "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==" }, "strip-ansi": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "integrity": "sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg==", "requires": { "ansi-regex": "^2.0.0" } @@ -4453,12 +4344,12 @@ "strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=" + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==" }, "tar": { - "version": "6.1.8", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.8.tgz", - "integrity": "sha512-sb9b0cp855NbkMJcskdSYA7b11Q8JsX4qe4pyUAfHp+Y6jBjJeek2ZVlwEfWayshEIwlIzXx0Fain3QG9JPm2A==", + "version": "6.1.11", + "resolved": 
"https://registry.npmjs.org/tar/-/tar-6.1.11.tgz", + "integrity": "sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA==", "requires": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", @@ -4545,7 +4436,7 @@ "tmp": { "version": "0.0.28", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.28.tgz", - "integrity": "sha1-Fyc1t/YU6nrzlmT6hM8N5OUV0SA=", + "integrity": "sha512-c2mmfiBmND6SOVxzogm1oda0OJ1HZVIk/5n26N59dDTh80MUeavpiCls4PGAdkX1PFkKokLpcf7prSjCeXLsJg==", "requires": { "os-tmpdir": "~1.0.1" } @@ -4553,7 +4444,7 @@ "touch": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/touch/-/touch-0.0.3.tgz", - "integrity": "sha1-Ua7z1ElXHU8oel2Hyci0kYGg2x0=", + "integrity": "sha512-/LQ54KM9rPf3rGXGo2UPQWx3ol242Zg6Whq27H5DEmZhCJo+pm9N5BzRGepO9vTVhYxpXJdcc1+3uaYt9NyeKg==", "requires": { "nopt": "~1.0.10" }, @@ -4561,7 +4452,7 @@ "nopt": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz", - "integrity": "sha1-bd0hvSoxQXuScn3Vhfim83YI6+4=", + "integrity": "sha512-NWmpvLSqUrgrAC9HCuxEvb+PSloHpqVu+FqcO4eeF2h5qYRhA7ev6KvelyQAKtegUbC6RypJnlEOhd8vloNKYg==", "requires": { "abbrev": "1" } @@ -4580,12 +4471,12 @@ "traverse": { "version": "0.3.9", "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz", - "integrity": "sha1-cXuPIgzAu3tE5AUUwisui7xw2Lk=" + "integrity": "sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ==" }, "tunnel-agent": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", "requires": { "safe-buffer": "^5.0.1" } @@ -4593,7 +4484,7 @@ "tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" }, "type": { "version": "1.2.0", @@ -4601,9 +4492,9 @@ "integrity": "sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==" }, "underscore": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.1.tgz", - "integrity": "sha512-hzSoAVtJF+3ZtiFX0VgfFPHEDRm7Y/QPjGyNo4TVdnDTdft3tr8hEkD25a1jC+TjTuE7tkHGKkhwCgs9dgBB2g==" + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.4.tgz", + "integrity": "sha512-BQFnUDuAQ4Yf/cYY5LNrK9NCJFKriaRbD9uR1fTeXnBeoa97W0i41qkZfGO9pSo8I5KzjAcSY2XYtdf0oKd7KQ==" }, "underscore-plus": { "version": "1.7.0", @@ -4624,7 +4515,7 @@ "util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, "uuid": { "version": "3.4.0", @@ -4634,35 +4525,34 @@ "verror": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==", "requires": { "assert-plus": "^1.0.0", "core-util-is": "1.0.2", "extsprintf": "^1.2.0" - } - }, - "wide-align": { - "version": "1.1.3", - "resolved": 
"https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", - "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", - "requires": { - "string-width": "^1.0.2 || 2" + }, + "dependencies": { + "core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==" + } } }, "window-size": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.4.tgz", - "integrity": "sha1-+OGqHuWlPsW/FR/6CXQqatdpeHY=" + "integrity": "sha512-2thx4pB0cV3h+Bw7QmMXcEbdmOzv9t0HFplJH/Lz6yu60hXYy5RT8rUu+wlIreVxWsGN20mo+MHeCSfUpQBwPw==" }, "wordwrap": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", - "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=" + "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==" }, "wrap-ansi": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", - "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", + "integrity": "sha512-vAaEaDM946gbNpH5pLVNR+vX2ht6n0Bt3GXwVB1AuAqZosOvHNF3P7wDnh8KLkSqgUh0uh77le7Owgoz+Z9XBw==", "requires": { "string-width": "^1.0.1", "strip-ansi": "^3.0.1" @@ -4671,17 +4561,17 @@ "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "wrench": { "version": "1.5.9", "resolved": "https://registry.npmjs.org/wrench/-/wrench-1.5.9.tgz", - "integrity": "sha1-QRaRxjqbJTGxcAJnJ5veyiOyFCo=" + "integrity": "sha512-QH+8W9n0UGDAxnRDOkQzG1N277GTaBgMDNdckluqnAY773njfs1gfo867IbMMbGjOZZof+zlRIUeQ9XN8VUHUQ==" }, "xmlbuilder": { "version": "0.4.3", "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-0.4.3.tgz", - "integrity": "sha1-xGFLp04K0ZbmCcknLNnh3bKKilg=" + "integrity": "sha512-t3QW+VdXvxcy214Wf5Mvb+38RPW6EUG1RpMjjtG+esbAFh+/50PdXz1iGywefNl70DW2ucNWTXBw5buTgzDWyw==" }, "xmldom": { "version": "0.1.31", @@ -4701,7 +4591,7 @@ "yargs": { "version": "3.32.0", "resolved": "https://registry.npmjs.org/yargs/-/yargs-3.32.0.tgz", - "integrity": "sha1-AwiOnr+edWtpdRYR0qXvWRSCyZU=", + "integrity": "sha512-ONJZiimStfZzhKamYvR/xvmgW3uEkAUFSP91y2caTEPhzF6uP2JfPiVZcq66b/YR0C3uitxSV7+T1x8p5bkmMg==", "requires": { "camelcase": "^2.0.1", "cliui": "^3.0.3", diff --git a/package-lock.json b/package-lock.json index aad5bc19c..5a7fed122 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "atom", - "version": "1.61.0-dev", + "version": "1.63.0-dev", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -1615,7 +1615,7 @@ "atom-grammar-test": { "version": "0.6.4", "resolved": "https://registry.npmjs.org/atom-grammar-test/-/atom-grammar-test-0.6.4.tgz", - "integrity": "sha1-2KU1A9H+k5mX9Ji3SirDEARKfU4=", + "integrity": "sha512-oVd4NmzM95nnb/CSPuyO/YlwbhRN7rpv3UTnc5btj9RSJaI7r6GzJoFYpIjOwBviGnWHuMoZxyupEn2VXbGbZw==", "requires": { "chevrotain": "^0.18.0", "escape-string-regexp": "^1.0.5" @@ -2271,7 +2271,7 @@ "chevrotain": { "version": "0.18.0", "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-0.18.0.tgz", - "integrity": "sha1-sodxTjFZC64sXR4vYRZz7+xHnYA=" + "integrity": 
"sha512-TdMrwmrzGuLta1iwC7yuDC7T3IQrI2WHjzbkY3naTX6RsQivy9aXBrTc+LuUbrs4mdefrwVHBVLW6ojn5giTZw==" }, "chownr": { "version": "1.1.3", @@ -4754,16 +4754,13 @@ } }, "language-clojure": { - "version": "https://www.atom.io/api/packages/language-clojure/versions/0.22.8/tarball", - "integrity": "sha512-V9tDXCuZf53Esy3W1eUuaZW2Dq78n3KdPWkypfz3pJJ1bklgLgCWxBUGjLAY4X/ULgYjucnelhp71xwLjZZa5A==" + "version": "file:packages/language-clojure" }, "language-coffee-script": { - "version": "https://www.atom.io/api/packages/language-coffee-script/versions/0.50.0/tarball", - "integrity": "sha512-Sp3b1i8wsd+AELphP2f52mli4C3YjicGC8ps21g48V3SrTZoM7tLE7lkcEdKddYlTqo0fBixTKN2R/iL6GcEVw==" + "version": "file:packages/language-coffee-script" }, "language-csharp": { - "version": "https://www.atom.io/api/packages/language-csharp/versions/1.1.0/tarball", - "integrity": "sha512-37RDzLiqSHJjopFg3T/Sqntx793OMQ4o7tkf3IuKe68Mshz3mwavJhIaIX/mca+0u/aWquySJFzlZ4D8EdtRSA==" + "version": "file:packages/language-csharp" }, "language-css": { "version": "https://www.atom.io/api/packages/language-css/versions/0.45.1/tarball", @@ -4773,12 +4770,10 @@ } }, "language-gfm": { - "version": "https://www.atom.io/api/packages/language-gfm/versions/0.90.8/tarball", - "integrity": "sha512-YQ13ypnfPvQTcZ/8j6cUuLsYBoU88qqPlFTRXNXa72L1HVaahFDgG0d0a/QOdOnxrYBtmEWR/5Q3FNPwPpSehw==" + "version": "file:packages/language-gfm" }, "language-git": { - "version": "https://www.atom.io/api/packages/language-git/versions/0.19.1/tarball", - "integrity": "sha512-xvsGO/d3/XsKJmwdAz9VGHo6t7A13VuJeuEoZaoLmvzwkVpFdpJcK8PNwVMPHav+lpNeu73qiXmqS+YIlvLwLQ==" + "version": "file:packages/language-git" }, "language-go": { "version": "https://www.atom.io/api/packages/language-go/versions/0.47.2/tarball", @@ -4788,8 +4783,7 @@ } }, "language-html": { - "version": "https://www.atom.io/api/packages/language-html/versions/0.53.1/tarball", - "integrity": "sha512-/GFk8qHnQ67E/+RZs1my117VKPIAsfUNrDg+7EU+HlCx8qnEnV7lBRaWedh0AoDDGtaMm2wmuhTM/1eGNcDJ8Q==", + "version": "file:packages/language-html", "requires": { "atom-grammar-test": "^0.6.3", "tree-sitter-embedded-template": "^0.15.2", @@ -4797,12 +4791,10 @@ } }, "language-hyperlink": { - "version": "https://www.atom.io/api/packages/language-hyperlink/versions/0.17.1/tarball", - "integrity": "sha512-bntgT5AVqSbWZpjjiGbKVfzjocWHgDLbfAnECKkk87owjlMeuzbZaylI+HRdbVxPMt9K1UdFRVT/NUaia+A3+g==" + "version": "file:packages/language-hyperlink" }, "language-java": { - "version": "https://www.atom.io/api/packages/language-java/versions/0.32.1/tarball", - "integrity": "sha512-CzS8Tr2uL93SElx/P6eZCDbxnGdBq9EBimFezXWWop+IgmYPNaNFS3d2kFUXgSNY3bvNV9ezpR7xSIZteFpisQ==", + "version": "file:packages/language-java", "requires": { "tree-sitter-java-dev": "^0.16.0-dev2" } @@ -4817,58 +4809,47 @@ } }, "language-json": { - "version": "https://www.atom.io/api/packages/language-json/versions/1.0.5/tarball", - "integrity": "sha512-n4kpZ0Z3Yju2qnqoGvYXgQJF2HdR21qlrLrZ66CmsAPI7Ttw0xgXbVHBNHaHIWlH3lQT30p472cNsYlQl3pdNA==", + "version": "file:packages/language-json", "requires": { "tree-sitter-json": "^0.15.1" } }, "language-less": { - "version": "https://www.atom.io/api/packages/language-less/versions/0.34.3/tarball", - "integrity": "sha512-x1sDaJKCIQuLufevH9dt9XET3zfKaXudF1RMq05D9OpQBnhi34qRlG/jgI1khykOUn/NuhSsb5ZJtixj0oy+bA==" + "version": "file:packages/language-less" }, "language-make": { - "version": "https://www.atom.io/api/packages/language-make/versions/0.23.0/tarball", - "integrity": 
"sha512-kNY6n/0eTu6398rIQHwaXC1+Rsq9a3TZrMd+KVNPoJJh33GnMocjPxEempZ6jAOL5fa+hxb8ESiUOcQlEm9hyA==" + "version": "file:packages/language-make" }, "language-mustache": { - "version": "https://www.atom.io/api/packages/language-mustache/versions/0.14.5/tarball", - "integrity": "sha512-1aC1OAoYye+krEJ8t5RzXiLYTEA/RJ/Igv1efDsuxvZHnIkdrSDzS/UsssS3snqPkIGyLI+htRvU/v11famx6A==" + "version": "file:packages/language-mustache" }, "language-objective-c": { - "version": "https://www.atom.io/api/packages/language-objective-c/versions/0.16.0/tarball", - "integrity": "sha512-KFkmXxNuTL2zwL8mfIF9PovRaWUOu/rWPp/fDjSgXPgClXUWeJdZQystXODr6u7kvGYEAdmjYFj/zQu7f/P85Q==" + "version": "file:packages/language-objective-c" }, "language-perl": { - "version": "https://www.atom.io/api/packages/language-perl/versions/0.38.1/tarball", - "integrity": "sha512-XXHULyFvbxAiRoj+MxIXoeO//in3bQctHZbaD72p3vFxm3klxe2ebx7b3cFmFYqf/g0eajmLrR3tR5m1Rmz1XQ==" + "version": "file:packages/language-perl" }, "language-php": { - "version": "https://www.atom.io/api/packages/language-php/versions/0.48.1/tarball", - "integrity": "sha512-E943QBnu4Z9PVEHdXVeY/XSWVopYnoB1Pr43RJHX3r9Xnd/slFNamBzVTIHazMqGM/33PSjaNmpeQFEfgRtHwQ==" + "version": "file:packages/language-php" }, "language-property-list": { - "version": "https://www.atom.io/api/packages/language-property-list/versions/0.9.1/tarball", - "integrity": "sha512-HD6HI41u57i0/Tu9catiriURhJsef0RDrzJDkGDtdFkE9F9KPxC9Fayq2JBLJrhIyADRVXFxwxsfwQ2Jmh6hxg==" + "version": "file:packages/language-property-list" }, "language-python": { - "version": "https://www.atom.io/api/packages/language-python/versions/0.53.6/tarball", - "integrity": "sha512-QLAajhoCNaDvWPE8qw/v0T0yMQCMavu5P0ZkJXTOuVzG3hj4W60F87PFYTgwSHa61KpXGvUA1kiGibeQbxytGA==", + "version": "file:packages/language-python", "requires": { "atom-grammar-test": "^0.6.4", "tree-sitter-python": "^0.17.0" } }, "language-ruby": { - "version": "https://www.atom.io/api/packages/language-ruby/versions/0.73.0/tarball", - "integrity": "sha512-dbqBGWUBHyzXStRiZNWR/Dx85Co3ecQvF9IWjngAcWdFsye1zrUWAdhSLOU8FvYQnP2jBgE2EmQQO+jSCG+T4Q==", + "version": "file:packages/language-ruby", "requires": { "tree-sitter-ruby": "^0.17.0" } }, "language-ruby-on-rails": { - "version": "https://www.atom.io/api/packages/language-ruby-on-rails/versions/0.25.3/tarball", - "integrity": "sha512-uI4ItSsq1J0/5gBblVgLv69C8TzWMcAoL19H8iFuosWWDRUsh9va1PrPMLeSNnNbnOYkw2fE53fqLlJjrgxiGw==" + "version": "file:packages/language-ruby-on-rails" }, "language-rust-bundled": { "version": "file:packages/language-rust-bundled", @@ -4887,50 +4868,40 @@ } }, "language-sass": { - "version": "https://www.atom.io/api/packages/language-sass/versions/0.62.1/tarball", - "integrity": "sha512-6UIvd6scZY06JE2X9INQzLHu3KOHnPOU16teD2MhsY3yU8OGExEtZRkY93G4OwUQN9GB2keeF70X1O7LX6FZSg==" + "version": "file:packages/language-sass" }, "language-shellscript": { - "version": "https://www.atom.io/api/packages/language-shellscript/versions/0.28.2/tarball", - "integrity": "sha512-YAbcijqWa07DSn6HXlV5KSJ/8nMBpT+DteEwOK2A4vXSSFc0phUMR+LcPcjVB5599OZkX4aB42DqjKHUT9LMtQ==", + "version": "file:packages/language-shellscript", "requires": { "tree-sitter-bash": "^0.16.1" } }, "language-source": { - "version": "https://www.atom.io/api/packages/language-source/versions/0.9.0/tarball", - "integrity": "sha512-Uu/C5EQKdKgwUOiCWM95CkCUePhT93KpiqsrVqEgTV1TssLY/LRwT9fd1XJSZ5EDKSS71Tfzvbww/V117uoDWw==" + "version": "file:packages/language-source" }, "language-sql": { - "version": 
"https://www.atom.io/api/packages/language-sql/versions/0.25.10/tarball", - "integrity": "sha512-JXlwc9wV0qnhLn2fe3xRSNghxy/MtmCgy5+6xXN3Dqr9f6Q9Jh4vy3Kwrhz4xSgpPcHMocQwS72JcFuTI9CRdw==" + "version": "file:packages/language-sql" }, "language-text": { - "version": "https://www.atom.io/api/packages/language-text/versions/0.7.4/tarball", - "integrity": "sha512-XPmROjdb8CvAznbyiDYNeJi0hKZegBA84bAyTSt/FbZR0enexxk+5NDlyjqYsmR7A1P+LtcMJJZdQYPgXr7mdw==" + "version": "file:packages/language-text" }, "language-todo": { - "version": "https://www.atom.io/api/packages/language-todo/versions/0.29.4/tarball", - "integrity": "sha512-mdSeM6hR7D9ZohrfMTA9wDH46MQbcbfTMxU5WpzYwvQXAvYEZyuhc2dzWZ827VsSOrUcOcAYVcOvTkTrx9nytg==" + "version": "file:packages/language-todo" }, "language-toml": { - "version": "https://www.atom.io/api/packages/language-toml/versions/0.20.0/tarball", - "integrity": "sha512-6xFDqM6nZpynmxGKUS85iUWY0yeub7GYvLyzSOqDejMuOL5UXAITnSNcb7jhr+hQA8KTj5dCmRjphkAQER4Ucg==" + "version": "file:packages/language-toml" }, "language-typescript": { - "version": "https://www.atom.io/api/packages/language-typescript/versions/0.6.3/tarball", - "integrity": "sha512-F/ZnFXEF7C14/8JQ3T1kiCKVff+AB043LE5i0k3m86YsVl6IrjK6ElBNu5TsmUd7Se3STmqPfjn0Pf3280AZmg==", + "version": "file:packages/language-typescript", "requires": { "tree-sitter-typescript": "^0.16.1" } }, "language-xml": { - "version": "https://www.atom.io/api/packages/language-xml/versions/0.35.3/tarball", - "integrity": "sha512-9fh1pwCSikEdHoOGprBr7xeO2lq8GuOwSRsN3dwJKGTvzFaji2Zh6KkgxHBEOh2spsc8ORT+THZ+h6hhHz+ckQ==" + "version": "file:packages/language-xml" }, "language-yaml": { - "version": "https://www.atom.io/api/packages/language-yaml/versions/0.32.0/tarball", - "integrity": "sha512-kx6Qj//j3PuFaf8yhlfPGdirRJ3NVvLJw+9Oi2Gg998K6vG/XecgvwyP5jVs4xExX8eYMOTlvN7n6dgkPf6LHQ==" + "version": "file:packages/language-yaml" }, "lazy-cache": { "version": "1.0.4", @@ -7997,9 +7968,9 @@ }, "dependencies": { "bl": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.0.3.tgz", - "integrity": "sha512-fs4G6/Hu4/EE+F75J8DuN/0IpQqNjAdC7aEQv7Qt8MHGUH7Ckv2MwTEEeN9QehD0pfIDkMI1bkHYkKy7xHyKIg==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", "requires": { "buffer": "^5.5.0", "inherits": "^2.0.4", @@ -8020,9 +7991,9 @@ "integrity": "sha512-wXqjST+SLt7R009ySCglWBCFpjUygmCIfD790/kVbiGmUgfYGuB14PiTd5DwVxSV4NcYHjzMkoj5LjQZwTQLEA==" }, "minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" }, "prebuild-install": { "version": "5.3.6", @@ -8046,15 +8017,6 @@ "which-pm-runs": "^1.0.0" } }, - "pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "requires": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, "readable-stream": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", @@ -8066,9 +8028,9 @@ } }, "simple-get": { - 
"version": "3.1.0", - "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-3.1.0.tgz", - "integrity": "sha512-bCR6cP+aTdScaQCnQKbPKtJOKDp/hj9EDLJo3Nw4y1QksqaovlW/bnptB6/c1e+qmNIDHRK+oXFDdEqBT8WzUA==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-3.1.1.tgz", + "integrity": "sha512-CQ5LTKGfCpvE1K0n2us+kuMPbk/q0EKl82s4aheV9oXjFEz6W/Y7oQFVJuU6QG77hRT4Ghb5RURteF5vnWjupA==", "requires": { "decompress-response": "^4.2.0", "once": "^1.3.1", @@ -8076,20 +8038,20 @@ } }, "tar-fs": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.0.tgz", - "integrity": "sha512-9uW5iDvrIMCVpvasdFHW0wJPez0K4JnMZtsuIeDI7HyMGJNxmDZDOCQROr7lXyS+iL/QMpj07qcjGYTSdRFXUg==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz", + "integrity": "sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==", "requires": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", - "tar-stream": "^2.0.0" + "tar-stream": "^2.1.4" } }, "tar-stream": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.1.4.tgz", - "integrity": "sha512-o3pS2zlG4gxr67GmFYBLlq+dM8gyRGUOvsrHclSkvtVtQbjV0s/+ZE8OpICbaj8clrX3tjeHngYGP7rweaBnuw==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", + "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", "requires": { "bl": "^4.0.3", "end-of-stream": "^1.4.1", @@ -8125,9 +8087,9 @@ }, "dependencies": { "nan": { - "version": "2.15.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.15.0.tgz", - "integrity": "sha512-8ZtvEnA2c5aYCZYd1cvgdnU6cqwixRoYg70xPLWUws5ORTa/lnw+u4amixRS/Ac5U5mQVgp9pnlSUnbNWFaWZQ==" + "version": "2.16.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.16.0.tgz", + "integrity": "sha512-UdAqHyFngu7TfQKsCBgAA6pWDkT8MAO7d0jyOecVhN5354xbLqdn8mV9Tat9gepAupm0bt2DbeaSC8vS52MuFA==" } } }, @@ -8236,9 +8198,9 @@ "integrity": "sha512-wXqjST+SLt7R009ySCglWBCFpjUygmCIfD790/kVbiGmUgfYGuB14PiTd5DwVxSV4NcYHjzMkoj5LjQZwTQLEA==" }, "minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" }, "prebuild-install": { "version": "5.3.6", @@ -8308,9 +8270,9 @@ } }, "tree-sitter-typescript": { - "version": "0.16.1", - "resolved": "https://registry.npmjs.org/tree-sitter-typescript/-/tree-sitter-typescript-0.16.1.tgz", - "integrity": "sha512-jyU5yl4W6JPn66v2YbzaO1ClDcdDnj+7YQNZz3STgEiUooSjpWI1Ucgw+S/qEGbf0fMXsC0fucpP+/M1uc9ubw==", + "version": "0.16.3", + "resolved": "https://registry.npmjs.org/tree-sitter-typescript/-/tree-sitter-typescript-0.16.3.tgz", + "integrity": "sha512-qdRydjlnFuxwlkE/+oqOywzcKL2l3G1xkhR9DxDySGfF4JiMdYCTqJCWRUYaGnagJDZBF7wGWtHf5FGGXdLjNw==", "requires": { "nan": "^2.14.0" } diff --git a/package.json b/package.json index 23f3b3520..5c80a8bb9 100644 --- a/package.json +++ b/package.json @@ -82,39 +82,39 @@ "key-path-helpers": "^0.4.0", "keybinding-resolver": "https://www.atom.io/api/packages/keybinding-resolver/versions/0.39.1/tarball", "language-c": 
"https://www.atom.io/api/packages/language-c/versions/0.60.19/tarball", - "language-clojure": "https://www.atom.io/api/packages/language-clojure/versions/0.22.8/tarball", - "language-coffee-script": "https://www.atom.io/api/packages/language-coffee-script/versions/0.50.0/tarball", - "language-csharp": "https://www.atom.io/api/packages/language-csharp/versions/1.1.0/tarball", + "language-clojure": "file:packages/language-clojure", + "language-coffee-script": "file:packages/language-coffee-script", + "language-csharp": "file:packages/language-csharp", "language-css": "https://www.atom.io/api/packages/language-css/versions/0.45.1/tarball", - "language-gfm": "https://www.atom.io/api/packages/language-gfm/versions/0.90.8/tarball", - "language-git": "https://www.atom.io/api/packages/language-git/versions/0.19.1/tarball", + "language-gfm": "file:packages/language-gfm", + "language-git": "file:packages/language-git", "language-go": "https://www.atom.io/api/packages/language-go/versions/0.47.2/tarball", - "language-html": "https://www.atom.io/api/packages/language-html/versions/0.53.1/tarball", - "language-hyperlink": "https://www.atom.io/api/packages/language-hyperlink/versions/0.17.1/tarball", - "language-java": "https://www.atom.io/api/packages/language-java/versions/0.32.1/tarball", + "language-html": "file:packages/language-html", + "language-hyperlink": "file:packages/language-hyperlink", + "language-java": "file:packages/language-java", "language-javascript": "https://www.atom.io/api/packages/language-javascript/versions/0.134.1/tarball", - "language-json": "https://www.atom.io/api/packages/language-json/versions/1.0.5/tarball", - "language-less": "https://www.atom.io/api/packages/language-less/versions/0.34.3/tarball", - "language-make": "https://www.atom.io/api/packages/language-make/versions/0.23.0/tarball", - "language-mustache": "https://www.atom.io/api/packages/language-mustache/versions/0.14.5/tarball", - "language-objective-c": "https://www.atom.io/api/packages/language-objective-c/versions/0.16.0/tarball", - "language-perl": "https://www.atom.io/api/packages/language-perl/versions/0.38.1/tarball", - "language-php": "https://www.atom.io/api/packages/language-php/versions/0.48.1/tarball", - "language-property-list": "https://www.atom.io/api/packages/language-property-list/versions/0.9.1/tarball", - "language-python": "https://www.atom.io/api/packages/language-python/versions/0.53.6/tarball", - "language-ruby": "https://www.atom.io/api/packages/language-ruby/versions/0.73.0/tarball", - "language-ruby-on-rails": "https://www.atom.io/api/packages/language-ruby-on-rails/versions/0.25.3/tarball", + "language-json": "file:packages/language-json", + "language-less": "file:packages/language-less", + "language-make": "file:packages/language-make", + "language-mustache": "file:packages/language-mustache", + "language-objective-c": "file:packages/language-objective-c", + "language-perl": "file:packages/language-perl", + "language-php": "file:packages/language-php", + "language-property-list": "file:packages/language-property-list", + "language-python": "file:packages/language-python", + "language-ruby": "file:packages/language-ruby", + "language-ruby-on-rails": "file:packages/language-ruby-on-rails", "language-rust-bundled": "file:packages/language-rust-bundled", - "language-sass": "https://www.atom.io/api/packages/language-sass/versions/0.62.1/tarball", - "language-shellscript": "https://www.atom.io/api/packages/language-shellscript/versions/0.28.2/tarball", - "language-source": 
"https://www.atom.io/api/packages/language-source/versions/0.9.0/tarball", - "language-sql": "https://www.atom.io/api/packages/language-sql/versions/0.25.10/tarball", - "language-text": "https://www.atom.io/api/packages/language-text/versions/0.7.4/tarball", - "language-todo": "https://www.atom.io/api/packages/language-todo/versions/0.29.4/tarball", - "language-toml": "https://www.atom.io/api/packages/language-toml/versions/0.20.0/tarball", - "language-typescript": "https://www.atom.io/api/packages/language-typescript/versions/0.6.3/tarball", - "language-xml": "https://www.atom.io/api/packages/language-xml/versions/0.35.3/tarball", - "language-yaml": "https://www.atom.io/api/packages/language-yaml/versions/0.32.0/tarball", + "language-sass": "file:packages/language-sass", + "language-shellscript": "file:packages/language-shellscript", + "language-source": "file:packages/language-source", + "language-sql": "file:packages/language-sql", + "language-text": "file:packages/language-text", + "language-todo": "file:packages/language-todo", + "language-toml": "file:packages/language-toml", + "language-typescript": "file:packages/language-typescript", + "language-xml": "file:packages/language-xml", + "language-yaml": "file:packages/language-yaml", "less-cache": "1.1.0", "line-ending-selector": "file:packages/line-ending-selector", "line-top-index": "0.3.1", @@ -233,39 +233,39 @@ "whitespace": "0.37.8", "wrap-guide": "0.41.0", "language-c": "0.60.19", - "language-clojure": "0.22.8", - "language-coffee-script": "0.50.0", - "language-csharp": "1.1.0", + "language-clojure": "file:./packages/language-clojure", + "language-coffee-script": "file:./packages/language-coffee-script", + "language-csharp": "file:./packages/language-csharp", "language-css": "0.45.1", - "language-gfm": "0.90.8", - "language-git": "0.19.1", + "language-gfm": "file:./packages/language-gfm", + "language-git": "file:./packages/language-git", "language-go": "0.47.2", - "language-html": "0.53.1", - "language-hyperlink": "0.17.1", - "language-java": "0.32.1", + "language-html": "file:./packages/language-html", + "language-hyperlink": "file:./packages/language-hyperlink", + "language-java": "file:./packages/language-java", "language-javascript": "0.134.1", - "language-json": "1.0.5", - "language-less": "0.34.3", - "language-make": "0.23.0", - "language-mustache": "0.14.5", - "language-objective-c": "0.16.0", - "language-perl": "0.38.1", - "language-php": "0.48.1", - "language-property-list": "0.9.1", - "language-python": "0.53.6", - "language-ruby": "0.73.0", - "language-ruby-on-rails": "0.25.3", + "language-json": "file:./packages/language-json", + "language-less": "file:./packages/language-less", + "language-make": "file:./packages/language-make", + "language-mustache": "file:./packages/language-mustache", + "language-objective-c": "file:./packages/language-objective-c", + "language-perl": "file:./packages/language-perl", + "language-php": "file:./packages/language-php", + "language-property-list": "file:./packages/language-property-list", + "language-python": "file:./packages/language-python", + "language-ruby": "file:./packages/language-ruby", + "language-ruby-on-rails": "file:./packages/language-ruby-on-rails", "language-rust-bundled": "file:./packages/language-rust-bundled", - "language-sass": "0.62.1", - "language-shellscript": "0.28.2", - "language-source": "0.9.0", - "language-sql": "0.25.10", - "language-text": "0.7.4", - "language-todo": "0.29.4", - "language-toml": "0.20.0", - "language-typescript": "0.6.3", - 
"language-xml": "0.35.3", - "language-yaml": "0.32.0" + "language-sass": "file:./packages/language-sass", + "language-shellscript": "file:./packages/language-shellscript", + "language-source": "file:./packages/language-source", + "language-sql": "file:./packages/language-sql", + "language-text": "file:./packages/language-text", + "language-todo": "file:./packages/language-todo", + "language-toml": "file:./packages/language-toml", + "language-typescript": "file:./packages/language-typescript", + "language-xml": "file:./packages/language-xml", + "language-yaml": "file:./packages/language-yaml" }, "private": true, "scripts": { diff --git a/packages/README.md b/packages/README.md index ab5107381..f495d0269 100644 --- a/packages/README.md +++ b/packages/README.md @@ -40,40 +40,40 @@ See [RFC 003](https://github.com/atom/atom/blob/master/docs/rfcs/003-consolidate | **image-view** | [`atom/image-view`][image-view] | [#18274](https://github.com/atom/atom/issues/18274) | | **incompatible-packages** | [`./incompatible-packages`](./incompatible-packages) | [#17846](https://github.com/atom/atom/issues/17846) | | **keybinding-resolver** | [`atom/keybinding-resolver`][keybinding-resolver] | [#18275](https://github.com/atom/atom/issues/18275) | -| **language-c** | [`atom/language-c`][language-c] | | -| **language-clojure** | [`atom/language-clojure`][language-clojure] | | -| **language-coffee-script** | [`atom/language-coffee-script`][language-coffee-script] | | -| **language-csharp** | [`atom/language-csharp`][language-csharp] | | -| **language-css** | [`atom/language-css`][language-css] | | -| **language-gfm** | [`atom/language-gfm`][language-gfm] | | -| **language-git** | [`atom/language-git`][language-git] | | -| **language-go** | [`atom/language-go`][language-go] | | -| **language-html** | [`atom/language-html`][language-html] | | -| **language-hyperlink** | [`atom/language-hyperlink`][language-hyperlink] | | -| **language-java** | [`atom/language-java`][language-java] | | -| **language-javascript** | [`atom/language-javascript`][language-javascript] | | -| **language-json** | [`atom/language-json`][language-json] | | -| **language-less** | [`atom/language-less`][language-less] | | -| **language-make** | [`atom/language-make`][language-make] | | -| **language-mustache** | [`atom/language-mustache`][language-mustache] | | -| **language-objective-c** | [`atom/language-objective-c`][language-objective-c] | | -| **language-perl** | [`atom/language-perl`][language-perl] | | -| **language-php** | [`atom/language-php`][language-php] | | -| **language-property-list** | [`atom/language-property-list`][language-property-list] | | -| **language-python** | [`atom/language-python`][language-python] | | -| **language-ruby** | [`atom/language-ruby`][language-ruby] | | -| **language-ruby-on-rails** | [`atom/language-ruby-on-rails`][language-ruby-on-rails] | | +| **language-c** | [`atom/language-c`](./language-c) | | +| **language-clojure** | [`atom/language-clojure`](./language-clojure) | | +| **language-coffee-script** | [`atom/language-coffee-script`](./language-coffee-script) | | +| **language-csharp** | [`atom/language-csharp`](./language-csharp) | | +| **language-css** | [`atom/language-css`](./language-css) | | +| **language-gfm** | [`atom/language-gfm`](./language-gfm) | | +| **language-git** | [`atom/language-git`](./language-git) | | +| **language-go** | [`atom/language-go`](./language-go) | | +| **language-html** | [`atom/language-html`](./language-html) | | +| **language-hyperlink** | 
[`atom/language-hyperlink`](./language-hyperlink) | | +| **language-java** | [`atom/language-java`](./language-java) | | +| **language-javascript** | [`atom/language-javascript`](./language-javascript) | | +| **language-json** | [`atom/language-json`](./language-json) | | +| **language-less** | [`atom/language-less`](./language-less) | | +| **language-make** | [`atom/language-make`](./language-make) | | +| **language-mustache** | [`atom/language-mustache`](./language-mustache) | | +| **language-objective-c** | [`atom/language-objective-c`](./language-objective-c) | | +| **language-perl** | [`atom/language-perl`](./language-perl) | | +| **language-php** | [`atom/language-php`](./language-php) | | +| **language-property-list** | [`atom/language-property-list`](./language-property-list) | | +| **language-python** | [`atom/language-python`](./language-python) | | +| **language-ruby** | [`atom/language-ruby`](./language-ruby) | | +| **language-ruby-on-rails** | [`atom/language-ruby-on-rails`](./language-ruby-on-rails) | | | **language-rust-bundled** | [`./language-rust-bundled`](./language-rust-bundled) | | -| **language-sass** | [`atom/language-sass`][language-sass] | | -| **language-shellscript** | [`atom/language-shellscript`][language-shellscript] | | -| **language-source** | [`atom/language-source`][language-source] | | -| **language-sql** | [`atom/language-sql`][language-sql] | | -| **language-text** | [`atom/language-text`][language-text] | | -| **language-todo** | [`atom/language-todo`][language-todo] | | -| **language-toml** | [`atom/language-toml`][language-toml] | | -| **language-typescript** | [`atom/language-typescript`][language-typescript] | | -| **language-xml** | [`atom/language-xml`][language-xml] | | -| **language-yaml** | [`atom/language-yaml`][language-yaml] | | +| **language-sass** | [`atom/language-sass`](./language-sass) | | +| **language-shellscript** | [`atom/language-shellscript`](./language-shellscript) | | +| **language-source** | [`atom/language-source`](./language-source) | | +| **language-sql** | [`atom/language-sql`](./language-sql) | | +| **language-text** | [`atom/language-text`](./language-text) | | +| **language-todo** | [`atom/language-todo`](./language-todo) | | +| **language-toml** | [`atom/language-toml`](./language-toml) | | +| **language-typescript** | [`atom/language-typescript`](./language-typescript) | | +| **language-xml** | [`atom/language-xml`](./language-xml) | | +| **language-yaml** | [`atom/language-yaml`](./language-yaml) | | | **line-ending-selector** | [`./packages/line-ending-selector`](./line-ending-selector) | [#17847](https://github.com/atom/atom/issues/17847) | | **link** | [`./link`](./link) | [#17848](https://github.com/atom/atom/issues/17848) | | **markdown-preview** | [`atom/markdown-preview`][markdown-preview] | | diff --git a/packages/language-c/.github/no-response.yml b/packages/language-c/.github/no-response.yml new file mode 100644 index 000000000..1c8799d13 --- /dev/null +++ b/packages/language-c/.github/no-response.yml @@ -0,0 +1,15 @@ +# Configuration for probot-no-response - https://github.com/probot/no-response + +# Number of days of inactivity before an issue is closed for lack of response +daysUntilClose: 28 + +# Label requiring a response +responseRequiredLabel: more-information-needed + +# Comment to post when closing an issue for lack of response. Set to `false` to disable. 
+closeComment: > + This issue has been automatically closed because there has been no response + to our request for more information from the original author. With only the + information that is currently in the issue, we don't have enough information + to take action. Please reach out if you have or find the answers we need so + that we can investigate further. diff --git a/packages/language-c/.github/workflows/main.yml b/packages/language-c/.github/workflows/main.yml new file mode 100644 index 000000000..80b99d1b6 --- /dev/null +++ b/packages/language-c/.github/workflows/main.yml @@ -0,0 +1,27 @@ +name: CI + +on: [push] + +env: + CI: true + +jobs: + Test: + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + channel: [stable, beta] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v1 + - uses: UziTech/action-setup-atom@v2 + with: + version: ${{ matrix.channel }} + - name: Install windows-build-tools + if: ${{ matrix.os == 'windows-latest' }} + run: | + npm i windows-build-tools@4.0.0 + - name: Install dependencies + run: apm install + - name: Run tests + run: atom --test spec diff --git a/packages/language-c/.gitignore b/packages/language-c/.gitignore new file mode 100644 index 000000000..3c3629e64 --- /dev/null +++ b/packages/language-c/.gitignore @@ -0,0 +1 @@ +node_modules diff --git a/packages/language-c/CONTRIBUTING.md b/packages/language-c/CONTRIBUTING.md new file mode 100644 index 000000000..0fd0ad696 --- /dev/null +++ b/packages/language-c/CONTRIBUTING.md @@ -0,0 +1 @@ +See the [Atom contributing guide](https://github.com/atom/atom/blob/master/CONTRIBUTING.md) diff --git a/packages/language-c/ISSUE_TEMPLATE.md b/packages/language-c/ISSUE_TEMPLATE.md new file mode 100644 index 000000000..b60bb86c9 --- /dev/null +++ b/packages/language-c/ISSUE_TEMPLATE.md @@ -0,0 +1,40 @@ + + +### Prerequisites + +* [ ] Put an X between the brackets on this line if you have done all of the following: + * Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode + * Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/ + * Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq + * Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom + * Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages + +### Description + +[Description of the issue] + +### Steps to Reproduce + +1. [First Step] +2. [Second Step] +3. [and so on...] + +**Expected behavior:** [What you expect to happen] + +**Actual behavior:** [What actually happens] + +**Reproduces how often:** [What percentage of the time does it reproduce?] + +### Versions + +You can get this information from copy and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running. + +### Additional Information + +Any additional information, configuration or data that might be necessary to reproduce the issue. diff --git a/packages/language-c/LICENSE.md b/packages/language-c/LICENSE.md new file mode 100644 index 000000000..1aac5cb84 --- /dev/null +++ b/packages/language-c/LICENSE.md @@ -0,0 +1,31 @@ +Copyright (c) 2014 GitHub Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +-------------------------------------------------------------------- + +This package was derived from a TextMate bundle located at +https://github.com/textmate/c.tmbundle and distributed under the following +license, located in `README.mdown`: + +Permission to copy, use, modify, sell and distribute this +software is granted. This software is provided "as is" without +express or implied warranty, and with no claim as to its +suitability for any purpose. diff --git a/packages/language-c/PULL_REQUEST_TEMPLATE.md b/packages/language-c/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000..cdaa94a86 --- /dev/null +++ b/packages/language-c/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,28 @@ +### Requirements + +* Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion. +* All new code requires tests to ensure against regressions + +### Description of the Change + + + +### Alternate Designs + + + +### Benefits + + + +### Possible Drawbacks + + + +### Applicable Issues + + diff --git a/packages/language-c/README.md b/packages/language-c/README.md new file mode 100644 index 000000000..64ad583d1 --- /dev/null +++ b/packages/language-c/README.md @@ -0,0 +1,10 @@ +# C/C++ language support in Atom +![CI Status](https://github.com/atom/language-c/actions/workflows/main.yml/badge.svg) + +Adds syntax highlighting and snippets to C/C++ files in Atom. + +Originally [converted](http://flight-manual.atom.io/hacking-atom/sections/converting-from-textmate) +from the [C TextMate bundle](https://github.com/textmate/c.tmbundle). + +Contributions are greatly appreciated. Please fork this repository and open a +pull request to add snippets, make grammar tweaks, etc. 
diff --git a/packages/language-c/coffeelint.json b/packages/language-c/coffeelint.json new file mode 100644 index 000000000..a5dd715e3 --- /dev/null +++ b/packages/language-c/coffeelint.json @@ -0,0 +1,37 @@ +{ + "max_line_length": { + "level": "ignore" + }, + "no_empty_param_list": { + "level": "error" + }, + "arrow_spacing": { + "level": "error" + }, + "no_interpolation_in_single_quotes": { + "level": "error" + }, + "no_debugger": { + "level": "error" + }, + "prefer_english_operator": { + "level": "error" + }, + "colon_assignment_spacing": { + "spacing": { + "left": 0, + "right": 1 + }, + "level": "error" + }, + "braces_spacing": { + "spaces": 0, + "level": "error" + }, + "spacing_after_comma": { + "level": "error" + }, + "no_stand_alone_at": { + "level": "error" + } +} diff --git a/packages/language-c/grammars/c++.cson b/packages/language-c/grammars/c++.cson new file mode 100644 index 000000000..16dc335d0 --- /dev/null +++ b/packages/language-c/grammars/c++.cson @@ -0,0 +1,424 @@ +'scopeName': 'source.cpp' +'fileTypes': [ + 'cc' + 'cpp' + 'cp' + 'cxx' + 'c++' + 'cu' + 'cuh' + 'h' + 'hh' + 'hpp' + 'hxx' + 'h++' + 'inl' + 'ino' + 'ipp' + 'tcc' + 'tpp' +] +'firstLineMatch': '(?i)-\\*-[^*]*(Mode:\\s*)?C\\+\\+(\\s*;.*?)?\\s*-\\*-' +'name': 'C++' +'patterns': [ + { + 'include': '#special_block' + } + { + 'include': '#strings' + } + { + 'match': '\\b(friend|explicit|virtual|override|final|noexcept)\\b' + 'name': 'storage.modifier.cpp' + } + { + 'match': '\\b(private:|protected:|public:)' + 'name': 'storage.modifier.cpp' + } + { + 'match': '\\b(catch|operator|try|throw|using)\\b' + 'name': 'keyword.control.cpp' + } + { + 'match': '\\bdelete\\b(\\s*\\[\\])?|\\bnew\\b(?!])' + 'name': 'keyword.control.cpp' + } + { + # Common naming idiom for C++ instanced vars: "fMemberName" + 'match': '\\b(f|m)[A-Z]\\w*\\b' + 'name': 'variable.other.readwrite.member.cpp' + } + { + 'match': '\\bthis\\b' + 'name': 'variable.language.this.cpp' + } + { + 'match': '\\bnullptr\\b' + 'name': 'constant.language.cpp' + } + { + 'match': '\\btemplate\\b\\s*' + 'name': 'storage.type.template.cpp' + } + { + 'match': '\\b(const_cast|dynamic_cast|reinterpret_cast|static_cast)\\b\\s*' + 'name': 'keyword.operator.cast.cpp' + } + { + 'match': '::' + 'name': 'punctuation.separator.namespace.access.cpp' + } + { + 'match': '\\b(and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas)\\b' + 'name': 'keyword.operator.cpp' + } + { + 'match': '\\b(class|decltype|wchar_t|char16_t|char32_t)\\b' + 'name': 'storage.type.cpp' + } + { + 'match': '\\b(constexpr|export|mutable|typename|thread_local)\\b' + 'name': 'storage.modifier.cpp' + } + { + 'begin': '''(?x) + (?: + ^ | # beginning of line + (?:(?' 
+ 'name': 'meta.angle-brackets.cpp' + 'patterns': [ + { + 'include': '#angle_brackets' + } + { + 'include': '$base' + } + ] + 'block': + 'begin': '\\{' + 'beginCaptures': + '0': + 'name': 'punctuation.section.block.begin.bracket.curly.c' + 'end': '\\}' + 'endCaptures': + '0': + 'name': 'punctuation.section.block.end.bracket.curly.c' + 'name': 'meta.block.cpp' + 'patterns': [ + { + 'captures': + '1': + 'name': 'support.function.any-method.c' + '2': + 'name': 'punctuation.definition.parameters.c' + 'match': '''(?x) + ( + (?!while|for|do|if|else|switch|catch|enumerate|return|r?iterate) + (?:\\b[A-Za-z_][A-Za-z0-9_]*+\\b|::)*+ # actual name + ) + \\s*(\\() # opening bracket + ''' + 'name': 'meta.function-call.c' + } + { + 'include': '$base' + } + ] + 'constructor': + 'patterns': [ + { + 'begin': '''(?x) + (?:^\\s*) # beginning of line + ((?!while|for|do|if|else|switch|catch|enumerate|r?iterate)[A-Za-z_][A-Za-z0-9_:]*) # actual name + \\s*(\\() # opening bracket + ''' + 'beginCaptures': + '1': + 'name': 'entity.name.function.cpp' + '2': + 'name': 'punctuation.definition.parameters.begin.c' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.definition.parameters.end.c' + 'name': 'meta.function.constructor.cpp' + 'patterns': [ + { + 'include': '$base' + } + ] + } + { + 'begin': '''(?x) + (:) + ( + (?= + \\s*[A-Za-z_][A-Za-z0-9_:]* # actual name + \\s* (\\() # opening bracket + ) + ) + ''' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.parameters.c' + 'end': '(?=\\{)' + 'name': 'meta.function.constructor.initializer-list.cpp' + 'patterns': [ + { + 'include': '$base' + } + ] + } + ] + 'special_block': + 'patterns': [ + { + "begin": "\\b(using)\\b\\s*(namespace)\\b\\s*((?:[_A-Za-z][_A-Za-z0-9]*\\b(::)?)*)", + "beginCaptures": { + "1": { + "name": "keyword.control.cpp" + }, + "2": { + "name": "storage.type.cpp" + }, + "3": { + "name": "entity.name.type.cpp" + } + }, + "end": "(;)", + "name": "meta.using-namespace-declaration.cpp" + }, + { + 'begin': '\\b(namespace)\\b\\s*([_A-Za-z][_A-Za-z0-9]*\\b)?+' + 'beginCaptures': + '1': + 'name': 'storage.type.cpp' + '2': + 'name': 'entity.name.type.cpp' + 'captures': + '1': + 'name': 'keyword.control.namespace.$2' + 'end': '(?<=\\})|(?=(;|,|\\(|\\)|>|\\[|\\]|=))' + 'name': 'meta.namespace-block.cpp' + 'patterns': [ + { + 'begin': '\\{' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.scope.cpp' + 'end': '\\}' + 'endCaptures': + '0': + 'name': 'punctuation.definition.scope.cpp' + 'patterns': [ + { + 'include': '#special_block' + } + { + 'include': '#constructor' + } + { + 'include': '$base' + } + ] + } + { + 'include': '$base' + } + ] + } + { + 'begin': '\\b(class|struct)\\b\\s*([_A-Za-z][_A-Za-z0-9]*\\b)?+(\\s*:\\s*(public|protected|private)\\s*([_A-Za-z][_A-Za-z0-9]*\\b)((\\s*,\\s*(public|protected|private)\\s*[_A-Za-z][_A-Za-z0-9]*\\b)*))?' + 'beginCaptures': + '1': + 'name': 'storage.type.cpp' + '2': + 'name': 'entity.name.type.cpp' + '4': + 'name': 'storage.type.modifier.cpp' + '5': + 'name': 'entity.name.type.inherited.cpp' + '6': + 'patterns': [ + { + 'match': '(public|protected|private)' + 'name': 'storage.type.modifier.cpp' + } + { + 'match': '[_A-Za-z][_A-Za-z0-9]*' + 'name': 'entity.name.type.inherited.cpp' + } + ] + 'end': '(?<=\\})|(?=(;|\\(|\\)|>|\\[|\\]|=))' + 'name': 'meta.class-struct-block.cpp' + 'patterns': [ + { + 'include': '#angle_brackets' + } + { + 'begin': '\\{' + 'beginCaptures': + '0': + 'name': 'punctuation.section.block.begin.bracket.curly.cpp' + 'end': '(\\})(\\s*\\n)?' 
+ 'endCaptures': + '1': + 'name': 'punctuation.section.block.end.bracket.curly.cpp' + '2': + 'name': 'invalid.illegal.you-forgot-semicolon.cpp' + 'patterns': [ + { + 'include': '#special_block' + } + { + 'include': '#constructor' + } + { + 'include': '$base' + } + ] + } + { + 'include': '$base' + } + ] + } + { + 'begin': '\\b(extern)(?=\\s*")' + 'beginCaptures': + '1': + 'name': 'storage.modifier.cpp' + 'end': '(?<=\\})|(?=\\w)|(?=\\s*#\\s*endif\\b)' + 'name': 'meta.extern-block.cpp' + 'patterns': [ + { + 'begin': '\\{' + 'beginCaptures': + '0': + 'name': 'punctuation.section.block.begin.bracket.curly.c' + 'end': '\\}|(?=\\s*#\\s*endif\\b)' + 'endCaptures': + '0': + 'name': 'punctuation.section.block.end.bracket.curly.c' + 'patterns': [ + { + 'include': '#special_block' + } + { + 'include': '$base' + } + ] + } + { + 'include': '$base' + } + ] + } + ] + 'strings': + 'patterns': [ + { + 'begin': '(u|u8|U|L)?"' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.cpp' + '1': + 'name': 'meta.encoding.cpp' + 'end': '"' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.cpp' + 'name': 'string.quoted.double.cpp' + 'patterns': [ + { + 'match': '\\\\u\\h{4}|\\\\U\\h{8}' + 'name': 'constant.character.escape.cpp' + } + { + 'match': '\\\\[\'"?\\\\abfnrtv]' + 'name': 'constant.character.escape.cpp' + } + { + 'match': '\\\\[0-7]{1,3}' + 'name': 'constant.character.escape.cpp' + } + { + 'match': '\\\\x\\h+' + 'name': 'constant.character.escape.cpp' + } + { + 'include': 'source.c#string_placeholder' + } + ] + } + { + 'begin': '(u|u8|U|L)?R"(?:([^ ()\\\\\\t]{0,16})|([^ ()\\\\\\t]*))\\(' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.cpp' + '1': + 'name': 'meta.encoding.cpp' + '3': + 'name': 'invalid.illegal.delimiter-too-long.cpp' + 'end': '\\)\\2(\\3)"' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.cpp' + '1': + 'name': 'invalid.illegal.delimiter-too-long.cpp' + 'name': 'string.quoted.double.raw.cpp' + } + ] diff --git a/packages/language-c/grammars/c.cson b/packages/language-c/grammars/c.cson new file mode 100644 index 000000000..c04ce5e15 --- /dev/null +++ b/packages/language-c/grammars/c.cson @@ -0,0 +1,1799 @@ +'scopeName': 'source.c' +'fileTypes': [ + 'c' + 'h.in' + 'xpm' +] +'firstLineMatch': '(?i)-\\*-[^*]*(Mode:\\s*)?C(\\s*;.*?)?\\s*-\\*-' +'name': 'C' +'patterns': [ + { + 'include': '#preprocessor-rule-enabled' + } + { + 'include': '#preprocessor-rule-disabled' + } + { + 'include': '#preprocessor-rule-conditional' + } + { + 'include': '#comments' + } + { + 'match': '\\b(break|case|continue|default|do|else|for|goto|if|_Pragma|return|switch|while)\\b' + 'name': 'keyword.control.c' + } + { + 'include': '#storage_types' + } + { + 'match': '\\b(const|extern|register|restrict|static|volatile|inline)\\b' + 'name': 'storage.modifier.c' + } + { + # Common naming idiom for C constants: "kConstantVariable" + 'match': '\\bk[A-Z]\\w*\\b' + 'name': 'constant.other.variable.mac-classic.c' + } + { + 'match': '\\bg[A-Z]\\w*\\b' + 'name': 'variable.other.readwrite.global.mac-classic.c' + } + { + 'match': '\\bs[A-Z]\\w*\\b' + 'name': 'variable.other.readwrite.static.mac-classic.c' + } + { + 'match': '\\b(NULL|true|false|TRUE|FALSE)\\b' + 'name': 'constant.language.c' + } + { + 'include': '#operators' + } + { + 'include': '#numbers' + } + { + 'include': '#strings' + } + { + 'begin': '''(?x) + ^\\s* ((\\#)\\s*define) \\s+ # define + ((?[a-zA-Z_$][\\w$]*)) # macro name + (?: + (\\() + ( + \\s* \\g \\s* # first argument + ((,) \\s* 
\\g \\s*)* # additional arguments + (?:\\.\\.\\.)? # varargs ellipsis? + ) + (\\)) + )? + ''' + 'beginCaptures': + '1': + 'name': 'keyword.control.directive.define.c' + '2': + 'name': 'punctuation.definition.directive.c' + '3': + 'name': 'entity.name.function.preprocessor.c' + '5': + 'name': 'punctuation.definition.parameters.begin.c' + '6': + 'name': 'variable.parameter.preprocessor.c' + '8': + 'name': 'punctuation.separator.parameters.c' + '9': + 'name': 'punctuation.definition.parameters.end.c' + 'end': '(?=(?://|/\\*))|(?' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.c' + 'name': 'string.quoted.other.lt-gt.include.c' + } + ] + } + { + 'include': '#pragma-mark' + } + { + 'begin': '^\\s*((#)\\s*line)\\b' + 'beginCaptures': + '1': + 'name': 'keyword.control.directive.line.c' + '2': + 'name': 'punctuation.definition.directive.c' + 'end': '(?=(?://|/\\*))|(?=+!]+|\\(\\)|\\[\\]))\\s*\\( + ) + ''' + 'end': '(?<=\\))(?!\\w)' + 'name': 'meta.function.c' + 'patterns': [ + { + 'include': '#function-innards' + } + ] + } + { + 'include': '#line_continuation_character' + } + { + 'match': '(\\[)|(\\])' + 'captures': + '1': + 'name': 'punctuation.definition.begin.bracket.square.c' + '2': + 'name': 'punctuation.definition.end.bracket.square.c' + } + { + 'match': ';' + 'name': 'punctuation.terminator.statement.c' + } + { + 'match': ',' + 'name': 'punctuation.separator.delimiter.c' + } +] +'repository': + 'access': + 'captures': + '2': + 'name': 'punctuation.separator.dot-access.c' + '3': + 'name': 'punctuation.separator.pointer-access.c' + '4': + 'name': 'variable.other.member.c' + 'match': '((\\.)|(->))\\s*(([a-zA-Z_][a-zA-Z_0-9]*)\\b(?!\\s*\\())?' + 'block': + 'patterns': [ + { + 'begin': '{' + 'beginCaptures': + '0': + 'name': 'punctuation.section.block.begin.bracket.curly.c' + 'end': '}|(?=\\s*#\\s*(?:elif|else|endif)\\b)' + 'endCaptures': + '0': + 'name': 'punctuation.section.block.end.bracket.curly.c' + 'name': 'meta.block.c' + 'patterns': [ + { + 'include': '#block_innards' + } + ] + } + ] + 'block_innards': + 'patterns': [ + { + 'include': '#preprocessor-rule-enabled-block' + } + { + 'include': '#preprocessor-rule-disabled-block' + } + { + 'include': '#preprocessor-rule-conditional-block' + } + { + 'include': '#access' + } + { + 'include': '#libc' + } + { + 'include': '#c_function_call' + } + { + 'captures': + '1': + 'name': 'variable.other.c' + '2': + 'name': 'punctuation.definition.parameters.c' + 'match': '''(?x) + (?: + (?: + (?=\\s)(?=+!]+ | \\(\\) | \\[\\])) + ) + \\s*(\\() # opening bracket + ''' + 'name': 'meta.initialization.c' + } + { + # Prevent unnecessary nesting of meta.block.c scope + 'begin': '{' + 'beginCaptures': + '0': + 'name': 'punctuation.section.block.begin.bracket.curly.c' + 'end': '}|(?=\\s*#\\s*(?:elif|else|endif)\\b)' + 'endCaptures': + '0': + 'name': 'punctuation.section.block.end.bracket.curly.c' + 'patterns': [ + { + 'include': '#block_innards' + } + ] + } + { + 'include': '#parens-block' + } + { + 'include': '$base' + } + ] + 'c_function_call': + # FIRST CAPTURE meta.function-call.c scope (provides an injectable scope, balanced parentheses and prevents unnecessary scope nesting) + 'begin': '''(?x) + (?!(?:while|for|do|if|else|switch|catch|enumerate|return|typeid|alignof|alignas|sizeof|[cr]?iterate)\\s*\\() + (?= + (?:[A-Za-z_][A-Za-z0-9_]*+|::)++\\s*\\( # actual name + | + (?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\\s*\\( + ) + ''' + 'end': '(?<=\\))(?!\\w)' + 'name': 'meta.function-call.c' + 'patterns': [ + { + 'include': 
'#function-call-innards' + } + ] + 'comments': + 'patterns': [ + { + 'captures': + '1': + 'name': 'meta.toc-list.banner.block.c' + 'match': '^/\\* =(\\s*.*?)\\s*= \\*/$\\n?' + 'name': 'comment.block.c' + } + { + 'begin': '/\\*' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.comment.begin.c' + 'end': '\\*/' + 'endCaptures': + '0': + 'name': 'punctuation.definition.comment.end.c' + 'name': 'comment.block.c' + } + { + 'match': '\\*/.*\\n' + 'name': 'invalid.illegal.stray-comment-end.c' + } + { + 'captures': + '1': + 'name': 'meta.toc-list.banner.line.c' + 'match': '^// =(\\s*.*?)\\s*=\\s*$\\n?' + 'name': 'comment.line.banner.cpp' + } + { + 'begin': '(^[ \\t]+)?(?=//)' + 'beginCaptures': + '1': + 'name': 'punctuation.whitespace.comment.leading.cpp' + 'end': '(?!\\G)' + 'patterns': [ + { + 'begin': '//' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.comment.cpp' + 'end': '(?=\\n)' + 'name': 'comment.line.double-slash.cpp' + 'patterns': [ + { + 'include': '#line_continuation_character' + } + ] + } + ] + } + ] + 'disabled': + # Consume nested preprocessor if(def)s + 'begin': '^\\s*#\\s*if(n?def)?\\b.*$' + 'end': '^\\s*#\\s*endif\\b' + 'patterns': [ + { + 'include': '#disabled' + } + { + 'include': '#pragma-mark' + } + ] + 'libc': + 'captures': + '1': + 'name': 'punctuation.whitespace.support.function.leading.c' + '2': + 'name': 'support.function.C99.c' + 'match': '''(?x) (\\s*) \\b + (_Exit|(?:nearbyint|nextafter|nexttoward|netoward|nan)[fl]?|a(?:cos|sin)h?[fl]?|abort|abs|asctime|assert + |atan(?:[h2]?[fl]?)?|atexit|ato[ifl]|atoll|bsearch|btowc|cabs[fl]?|cacos|cacos[fl]|cacosh[fl]? + |calloc|carg[fl]?|casinh?[fl]?|catanh?[fl]?|cbrt[fl]?|ccosh?[fl]?|ceil[fl]?|cexp[fl]?|cimag[fl]? + |clearerr|clock|clog[fl]?|conj[fl]?|copysign[fl]?|cosh?[fl]?|cpow[fl]?|cproj[fl]?|creal[fl]? + |csinh?[fl]?|csqrt[fl]?|ctanh?[fl]?|ctime|difftime|div|erfc?[fl]?|exit|fabs[fl]? + |exp(?:2[fl]?|[fl]|m1[fl]?)?|fclose|fdim[fl]?|fe[gs]et(?:env|exceptflag|round)|feclearexcept + |feholdexcept|feof|feraiseexcept|ferror|fetestexcept|feupdateenv|fflush|fgetpos|fgetw?[sc] + |floor[fl]?|fmax?[fl]?|fmin[fl]?|fmod[fl]?|fopen|fpclassify|fprintf|fputw?[sc]|fread|free|freopen + |frexp[fl]?|fscanf|fseek|fsetpos|ftell|fwide|fwprintf|fwrite|fwscanf|genv|get[sc]|getchar|gmtime + |gwc|gwchar|hypot[fl]?|ilogb[fl]?|imaxabs|imaxdiv|isalnum|isalpha|isblank|iscntrl|isdigit|isfinite + |isgraph|isgreater|isgreaterequal|isinf|isless(?:equal|greater)?|isw?lower|isnan|isnormal|isw?print + |isw?punct|isw?space|isunordered|isw?upper|iswalnum|iswalpha|iswblank|iswcntrl|iswctype|iswdigit|iswgraph + |isw?xdigit|labs|ldexp[fl]?|ldiv|lgamma[fl]?|llabs|lldiv|llrint[fl]?|llround[fl]?|localeconv|localtime + |log[2b]?[fl]?|log1[p0][fl]?|longjmp|lrint[fl]?|lround[fl]?|malloc|mbr?len|mbr?towc|mbsinit|mbsrtowcs + |mbstowcs|memchr|memcmp|memcpy|memmove|memset|mktime|modf[fl]?|perror|pow[fl]?|printf|puts|putw?c(?:har)? + |qsort|raise|rand|remainder[fl]?|realloc|remove|remquo[fl]?|rename|rewind|rint[fl]?|round[fl]?|scalbl?n[fl]? 
+ |scanf|setbuf|setjmp|setlocale|setvbuf|signal|signbit|sinh?[fl]?|snprintf|sprintf|sqrt[fl]?|srand|sscanf + |strcat|strchr|strcmp|strcoll|strcpy|strcspn|strerror|strftime|strlen|strncat|strncmp|strncpy|strpbrk + |strrchr|strspn|strstr|strto[kdf]|strtoimax|strtol[dl]?|strtoull?|strtoumax|strxfrm|swprintf|swscanf + |system|tan|tan[fl]|tanh[fl]?|tgamma[fl]?|time|tmpfile|tmpnam|tolower|toupper|trunc[fl]?|ungetw?c|va_arg + |va_copy|va_end|va_start|vfw?printf|vfw?scanf|vprintf|vscanf|vsnprintf|vsprintf|vsscanf|vswprintf|vswscanf + |vwprintf|vwscanf|wcrtomb|wcscat|wcschr|wcscmp|wcscoll|wcscpy|wcscspn|wcsftime|wcslen|wcsncat|wcsncmp|wcsncpy + |wcspbrk|wcsrchr|wcsrtombs|wcsspn|wcsstr|wcsto[dkf]|wcstoimax|wcstol[dl]?|wcstombs|wcstoull?|wcstoumax|wcsxfrm + |wctom?b|wmem(?:set|chr|cpy|cmp|move)|wprintf|wscanf)\\b + ''' + 'line_continuation_character': + 'patterns': [ + { + 'match': '(\\\\)\\n' + 'captures': + '1': + 'name': 'constant.character.escape.line-continuation.c' + } + ] + 'numbers': + 'patterns': [ + { + 'match': '\\b((0(x|X)[0-9a-fA-F]([0-9a-fA-F\']*[0-9a-fA-F])?)|(0(b|B)[01]([01\']*[01])?)|(([0-9]([0-9\']*[0-9])?\\.?[0-9]*([0-9\']*[0-9])?)|(\\.[0-9]([0-9\']*[0-9])?))((e|E)(\\+|-)?[0-9]([0-9\']*[0-9])?)?)(L|l|UL|ul|u|U|F|f|ll|LL|ull|ULL)?\\b' + 'name': 'constant.numeric.c' + } + ] + 'parens': + 'begin': '\\(' + 'beginCaptures': + '0': + 'name': 'punctuation.section.parens.begin.bracket.round.c' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.parens.end.bracket.round.c' + 'patterns': [ + { + 'include': '$base' + } + ] + 'parens-block': + 'begin': '\\(' + 'beginCaptures': + '0': + 'name': 'punctuation.section.parens.begin.bracket.round.c' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.parens.end.bracket.round.c' + 'patterns': [ + { + 'include': '#block_innards' + } + ] + 'pragma-mark': + 'captures': + '1': + 'name': 'meta.preprocessor.pragma.c' + '2': + 'name': 'keyword.control.directive.pragma.pragma-mark.c' + '3': + 'name': 'punctuation.definition.directive.c' + '4': + 'name': 'entity.name.tag.pragma-mark.c' + 'match': '^\\s*(((#)\\s*pragma\\s+mark)\\s+(.*))' + 'name': 'meta.section' + 'operators': + 'patterns': [ + { + 'match': '(?>=|\\|=' + 'name': 'keyword.operator.assignment.compound.bitwise.c' + } + { + 'match': '<<|>>' + 'name': 'keyword.operator.bitwise.shift.c' + } + { + 'match': '!=|<=|>=|==|<|>' + 'name': 'keyword.operator.comparison.c' + } + { + 'match': '&&|!|\\|\\|' + 'name': 'keyword.operator.logical.c' + } + { + 'match': '&|\\||\\^|~' + 'name': 'keyword.operator.c' + } + { + 'match': '=' + 'name': 'keyword.operator.assignment.c' + } + { + 'match': '%|\\*|/|-|\\+' + 'name': 'keyword.operator.c' + } + { + 'begin': '\\?' 
+ 'beginCaptures': + '0': + 'name': 'keyword.operator.ternary.c' + 'end': ':' + 'applyEndPatternLast': true # To prevent matching C++ namespace access :: + 'endCaptures': + '0': + 'name': 'keyword.operator.ternary.c' + 'patterns': [ + { + 'include': '#access' + } + { + 'include': '#libc' + } + { + 'include': '#c_function_call' + } + { + 'include': '$base' + } + ] + } + ] + 'strings': + 'patterns': [ + { + 'begin': '"' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.c' + 'end': '"' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.c' + 'name': 'string.quoted.double.c' + 'patterns': [ + { + 'include': '#string_escaped_char' + } + { + 'include': '#string_placeholder' + } + { + 'include': '#line_continuation_character' + } + ] + } + { + 'begin': '\'' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.c' + 'end': '\'' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.c' + 'name': 'string.quoted.single.c' + 'patterns': [ + { + 'include': '#string_escaped_char' + } + { + 'include': '#line_continuation_character' + } + ] + } + ] + 'string_escaped_char': + 'patterns': [ + { + 'match': '''(?x)\\\\ ( + \\\\ | + [abefnprtv'"?] | + [0-3]\\d{,2} | + [4-7]\\d? | + x[a-fA-F0-9]{,2} | + u[a-fA-F0-9]{,4} | + U[a-fA-F0-9]{,8} ) + ''' + 'name': 'constant.character.escape.c' + } + { + 'match': '\\\\.' + 'name': 'invalid.illegal.unknown-escape.c' + } + ] + 'string_placeholder': + 'patterns': [ + { + 'match': '''(?x) % + (\\d+\\$)? # field (argument #) + [#0\\- +']* # flags + [,;:_]? # separator character (AltiVec) + ((-?\\d+)|\\*(-?\\d+\\$)?)? # minimum field width + (\\.((-?\\d+)|\\*(-?\\d+\\$)?)?)? # precision + (hh|h|ll|l|j|t|z|q|L|vh|vl|v|hv|hl)? # length modifier + [diouxXDOUeEfFgGaACcSspn%] # conversion type + ''' + 'name': 'constant.other.placeholder.c' + } + { + 'match': '(%)(?!"\\s*(PRI|SCN))' + 'captures': + '1': + 'name': 'invalid.illegal.placeholder.c' + } + ] + 'storage_types': + 'patterns': [ + { + 'match': '\\b(asm|__asm__|auto|bool|_Bool|char|_Complex|double|enum|float|_Imaginary|int|long|short|signed|struct|typedef|union|unsigned|void)\\b' + 'name': 'storage.type.c' + } + ] + 'vararg_ellipses': + 'match': '(?=+!]+|\\(\\)|\\[\\]))\\s*\\( + ) + ''' + 'end': '(?<=\\))(?!\\w)|(?=+!]+|\\(\\)|\\[\\])) + ) + \\s*(\\() + ''' + 'beginCaptures': + '1': + 'name': 'entity.name.function.c' + '2': + 'name': 'punctuation.section.arguments.begin.bracket.round.c' + 'end': '(\\))|(?=+!]+|\\(\\)|\\[\\])) + ) + \\s*(\\() + ''' + 'beginCaptures': + '1': + 'name': 'entity.name.function.c' + '2': + 'name': 'punctuation.section.parameters.begin.bracket.round.c' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.parameters.end.bracket.round.c' + 'patterns': [ + { + 'include': '#function-innards' + } + ] + } + { + 'begin': '\\(' + 'beginCaptures': + '0': + 'name': 'punctuation.section.parens.begin.bracket.round.c' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.parens.end.bracket.round.c' + 'patterns': [ + { + 'include': '#function-innards' + } + ] + } + { + 'include': '$base' + } + ] + 'function-call-innards': + 'patterns': [ + { + 'include': '#comments' + } + { + 'include': '#storage_types' + } + { + 'include': '#access' + } + { + 'include': '#operators' + } + { + 'begin': '''(?x) + (?!(?:while|for|do|if|else|switch|catch|enumerate|return|typeid|alignof|alignas|sizeof|[cr]?iterate)\\s*\\() + ( + (?:[A-Za-z_][A-Za-z0-9_]*+|::)++ # actual name + | + (?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\])) 
+ ) + \\s*(\\() + ''' + 'beginCaptures': + '1': + 'name': 'entity.name.function.c' + '2': + 'name': 'punctuation.section.arguments.begin.bracket.round.c' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.arguments.end.bracket.round.c' + 'patterns': [ + { + 'include': '#function-call-innards' + } + ] + } + { + 'begin': '\\(' + 'beginCaptures': + '0': + 'name': 'punctuation.section.parens.begin.bracket.round.c' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.parens.end.bracket.round.c' + 'patterns': [ + { + 'include': '#function-call-innards' + } + ] + } + { + 'include': '#block_innards' + } + ] diff --git a/packages/language-c/grammars/tree-sitter-c.cson b/packages/language-c/grammars/tree-sitter-c.cson new file mode 100644 index 000000000..455747b64 --- /dev/null +++ b/packages/language-c/grammars/tree-sitter-c.cson @@ -0,0 +1,185 @@ +name: 'C' +scopeName: 'source.c' +type: 'tree-sitter' +parser: 'tree-sitter-c' + +injectionRegex: 'c|C' + +fileTypes: [ + 'h' + 'c' + 'h.in' +] + +folds: [ + { + type: ['comment', 'preproc_arg'] + } + { + type: ['preproc_if', 'preproc_ifdef', 'preproc_elif'], + end: {type: ['preproc_else', 'preproc_elif']} + } + { + type: ['preproc_if', 'preproc_ifdef'], + end: {index: -1} + } + { + type: ['preproc_else', 'preproc_elif'] + start: {index: 0} + } + { + type: [ + 'enumerator_list' + 'compound_statement' + 'declaration_list' + 'field_declaration_list' + 'parameter_list' + 'argument_list' + 'initializer_list' + 'parenthesized_expression' + 'template_parameter_list' + 'template_argument_list' + ] + start: {index: 0} + end: {index: -1} + } + { + type: 'case_statement' + start: {index: 0} + end: {type: 'break_statement', index: -1} + } + { + type: 'case_statement' + start: {index: 0} + } +] + +comments: + start: '// ' + +scopes: + 'translation_unit': 'source.c' + 'comment': 'comment.block' + + 'identifier': [ + {match: '^[A-Z\\d_]+$', scopes: 'constant.other'} + ] + + '"#if"': 'keyword.control.directive' + '"#ifdef"': 'keyword.control.directive' + '"#ifndef"': 'keyword.control.directive' + '"#elif"': 'keyword.control.directive' + '"#else"': 'keyword.control.directive' + '"#endif"': 'keyword.control.directive' + '"#define"': 'keyword.control.directive' + '"#include"': 'keyword.control.directive' + 'preproc_directive': 'keyword.control.directive' + + '"if"': 'keyword.control' + '"else"': 'keyword.control' + '"do"': 'keyword.control' + '"for"': 'keyword.control' + '"while"': 'keyword.control' + '"break"': 'keyword.control' + '"continue"': 'keyword.control' + '"return"': 'keyword.control' + '"switch"': 'keyword.control' + '"case"': 'keyword.control' + '"default"': 'keyword.control' + '"goto"': 'keyword.control' + + '"struct"': 'keyword.control' + '"enum"': 'keyword.control' + '"union"': 'keyword.control' + '"typedef"': 'keyword.control' + + 'preproc_function_def > identifier:nth-child(1)': 'entity.name.function.preprocessor' + 'preproc_arg': 'meta.preprocessor.macro' + + ''' + call_expression > identifier, + call_expression > field_expression > field_identifier, + function_declarator > identifier + ''': 'entity.name.function' + + 'statement_identifier': 'constant.variable' + + 'field_identifier': 'variable.other.member' + + 'type_identifier': 'support.storage.type' + 'primitive_type': 'support.storage.type' + '"signed"': 'support.storage.type' + '"unsigned"': 'support.storage.type' + '"short"': 'support.storage.type' + '"long"': 'support.storage.type' + + 'char_literal': 'string.quoted.single' + 'string_literal': 
'string.quoted.double' + 'system_lib_string': 'string.quoted.other' + 'escape_sequence': 'constant.character.escape' + + 'number_literal': 'constant.numeric.decimal' + 'null': 'constant.language.null' + 'true': 'constant.language.boolean' + 'false': 'constant.language.boolean' + + 'auto': 'storage.modifier' + '"extern"': 'storage.modifier' + '"register"': 'storage.modifier' + '"static"': 'storage.modifier' + '"inline"': 'storage.modifier' + '"const"': 'storage.modifier' + '"volatile"': 'storage.modifier' + '"restrict"': 'storage.modifier' + '"_Atomic"': 'storage.modifier' + 'function_specifier': 'storage.modifier' + + '";"': 'punctuation.terminator.statement' + '"["': 'punctuation.definition.begin.bracket.square' + '"]"': 'punctuation.definition.end.bracket.square' + '","': 'punctuation.separator.delimiter' + 'char_literal > "\'"': 'punctuation.definition.string' + 'string_literal > "\\""': 'punctuation.definition.string' + '"{"': 'punctuation.section.block.begin.bracket.curly' + '"}"': 'punctuation.section.block.end.bracket.curly' + '"("': 'punctuation.section.parens.begin.bracket.round' + '")"': 'punctuation.section.parens.end.bracket.round' + + '"sizeof"': 'keyword.operator.sizeof' + '"."': 'keyword.operator.member' + '"->"': 'keyword.operator.member' + '"*"': 'keyword.operator' + '"-"': 'keyword.operator' + '"+"': 'keyword.operator' + '"/"': 'keyword.operator' + '"%"': 'keyword.operator' + '"++"': 'keyword.operator' + '"--"': 'keyword.operator' + '"=="': 'keyword.operator' + '"!"': 'keyword.operator' + '"!="': 'keyword.operator' + '"<"': 'keyword.operator' + '">"': 'keyword.operator' + '">="': 'keyword.operator' + '"<="': 'keyword.operator' + '"&&"': 'keyword.operator' + '"||"': 'keyword.operator' + '"&"': 'keyword.operator' + '"|"': 'keyword.operator' + '"^"': 'keyword.operator' + '"~"': 'keyword.operator' + '"<<"': 'keyword.operator' + '">>"': 'keyword.operator' + '"="': 'keyword.operator' + '"+="': 'keyword.operator' + '"-="': 'keyword.operator' + '"*="': 'keyword.operator' + '"/="': 'keyword.operator' + '"%="': 'keyword.operator' + '"<<="': 'keyword.operator' + '">>="': 'keyword.operator' + '"&="': 'keyword.operator' + '"^="': 'keyword.operator' + '"|="': 'keyword.operator' + '"?"': 'keyword.operator' + '":"': 'keyword.operator' diff --git a/packages/language-c/grammars/tree-sitter-cpp.cson b/packages/language-c/grammars/tree-sitter-cpp.cson new file mode 100644 index 000000000..d8fb0e4e4 --- /dev/null +++ b/packages/language-c/grammars/tree-sitter-cpp.cson @@ -0,0 +1,251 @@ +name: 'C++' +scopeName: 'source.cpp' +type: 'tree-sitter' +parser: 'tree-sitter-cpp' + +injectionRegex: '(c|C)(\\+\\+|pp|PP)' + +fileTypes: [ + 'cc' + 'cpp' + 'cp' + 'cxx' + 'c++' + 'cu' + 'cuh' + 'h' + 'hh' + 'hpp' + 'hxx' + 'h++' + 'inl' + 'ino' + 'ipp' + 'tcc' + 'tpp' +] + +contentRegex: '\n\\s*(namespace|class|template)\\s+' + +folds: [ + { + type: ['comment', 'preproc_arg'] + } + { + type: ['preproc_if', 'preproc_ifdef', 'preproc_elif'], + end: {type: ['preproc_else', 'preproc_elif']} + } + { + type: ['preproc_if', 'preproc_ifdef'], + end: {index: -1} + } + { + type: ['preproc_else', 'preproc_elif'] + start: {index: 0} + } + { + type: [ + 'enumerator_list' + 'compound_statement' + 'declaration_list' + 'field_declaration_list' + 'parameter_list' + 'argument_list' + 'initializer_list' + 'parenthesized_expression' + 'template_parameter_list' + 'template_argument_list' + ] + start: {index: 0} + end: {index: -1} + } + { + type: 'case_statement' + start: {index: 0} + end: {type: 'break_statement', index: -1} + 
} + { + type: 'case_statement' + start: {index: 0} + } +] + +comments: + start: '// ' + +scopes: + 'translation_unit': 'source.cpp' + 'comment': 'comment.block' + + 'identifier': [ + {match: '^[A-Z\\d_]+$', scopes: 'constant.other'} + ] + + '"#if"': 'keyword.control.directive' + '"#ifdef"': 'keyword.control.directive' + '"#ifndef"': 'keyword.control.directive' + '"#elif"': 'keyword.control.directive' + '"#else"': 'keyword.control.directive' + '"#endif"': 'keyword.control.directive' + '"#define"': 'keyword.control.directive' + '"#include"': 'keyword.control.directive' + 'preproc_directive': 'keyword.control.directive' + + '"if"': 'keyword.control' + '"else"': 'keyword.control' + '"do"': 'keyword.control' + '"for"': 'keyword.control' + '"while"': 'keyword.control' + '"break"': 'keyword.control' + '"continue"': 'keyword.control' + '"return"': 'keyword.control' + '"switch"': 'keyword.control' + '"case"': 'keyword.control' + '"default"': 'keyword.control' + '"goto"': 'keyword.control' + + '"struct"': 'keyword.control' + '"enum"': 'keyword.control' + '"union"': 'keyword.control' + '"typedef"': 'keyword.control' + '"class"': 'keyword.control' + '"using"': 'keyword.control' + '"namespace"': 'keyword.control' + '"template"': 'keyword.control' + '"typename"': 'keyword.control' + '"try"': 'keyword.control' + '"catch"': 'keyword.control' + '"throw"': 'keyword.control' + '"__attribute__"': 'keyword.attribute' + + 'preproc_function_def > identifier:nth-child(1)': 'entity.name.function.preprocessor' + 'preproc_arg': 'meta.preprocessor.macro' + 'preproc_directive': 'keyword.control.directive' + + 'template_function > identifier': [ + { + match: '^(static|const|dynamic|reinterpret)_cast$' + scopes: 'keyword.operator' + } + ] + + ''' + call_expression > identifier, + call_expression > field_expression > field_identifier, + call_expression > scoped_identifier > identifier, + template_function > identifier, + template_function > scoped_identifier > identifier, + template_method > field_identifier, + function_declarator > identifier, + function_declarator > field_identifier, + function_declarator > scoped_identifier > identifier, + destructor_name > identifier + ''': 'entity.name.function' + + 'statement_identifier': 'constant.variable' + + 'field_identifier': 'variable.other.member' + + 'type_identifier': 'support.storage.type' + 'primitive_type': 'support.storage.type' + '"unsigned"': 'support.storage.type' + '"signed"': 'support.storage.type' + '"short"': 'support.storage.type' + '"long"': 'support.storage.type' + 'auto': 'support.storage.type' + + 'char_literal': 'string.quoted.single' + 'string_literal': 'string.quoted.double' + 'system_lib_string': 'string.quoted.other' + 'raw_string_literal': 'string.quoted.other' + 'escape_sequence': 'constant.character.escape' + 'preproc_include > string_literal > escape_sequence': 'string.quoted.double' + + 'number_literal': 'constant.numeric.decimal' + 'null': 'constant.language.null' + 'nullptr': 'constant.language.null' + 'true': 'constant.language.boolean' + 'false': 'constant.language.boolean' + + '"extern"': 'storage.modifier' + '"static"': 'storage.modifier' + '"register"': 'storage.modifier' + '"friend"': 'storage.modifier' + '"inline"': 'storage.modifier' + '"explicit"': 'storage.modifier' + '"const"': 'storage.modifier' + '"constexpr"': 'storage.modifier' + '"volatile"': 'storage.modifier' + '"restrict"': 'storage.modifier' + 'function_specifier': 'storage.modifier' + '"public"': 'storage.modifier' + '"private"': 'storage.modifier' + '"protected"': 
'storage.modifier' + '"final"': 'storage.modifier' + '"override"': 'storage.modifier' + '"virtual"': 'storage.modifier' + '"noexcept"': 'storage.modifier' + '"mutable"': 'storage.modifier' + + '";"': 'punctuation.terminator.statement' + '"["': 'punctuation.definition.begin.bracket.square' + '"]"': 'punctuation.definition.end.bracket.square' + 'access_specifier > ":"': 'punctuation.definition.visibility.colon' + 'base_class_clause > ":"': 'punctuation.definition.inheritance.colon' + 'base_class_clause > ","': 'punctuation.definition.separator.class.comma' + 'field_declaration > ","': 'punctuation.separator.delimiter' + 'parameter_list > ","': 'punctuation.separator.delimiter' + 'field_initializer_list > ":"': 'punctuation.definition.initialization.colon' + 'field_initializer_list > ","': 'punctuation.separator.delimiter' + '"::"': 'punctuation.separator.method.double-colon' + 'template_parameter_list > "<"': 'punctuation.definition.template.bracket.angle' + 'template_parameter_list > ">"': 'punctuation.definition.template.bracket.angle' + 'template_argument_list > ">"': 'punctuation.definition.template.bracket.angle' + 'template_argument_list > "<"': 'punctuation.definition.template.bracket.angle' + 'char_literal > "\'"': 'punctuation.definition.string' + 'string_literal > "\\""': 'punctuation.definition.string' + '"{"': 'punctuation.section.block.begin.bracket.curly' + '"}"': 'punctuation.section.block.end.bracket.curly' + '"("': 'punctuation.section.parens.begin.bracket.round' + '")"': 'punctuation.section.parens.end.bracket.round' + + '"sizeof"': 'keyword.operator.sizeof' + '"new"': 'keyword.operator' + '"delete"': 'keyword.operator' + '"."': 'keyword.operator.member' + '"->"': 'keyword.operator.member' + '"*"': 'keyword.operator' + '"-"': 'keyword.operator' + '"+"': 'keyword.operator' + '"/"': 'keyword.operator' + '"%"': 'keyword.operator' + '"++"': 'keyword.operator' + '"--"': 'keyword.operator' + '"=="': 'keyword.operator' + '"!"': 'keyword.operator' + '"!="': 'keyword.operator' + 'relational_expression > "<"': 'keyword.operator' + 'relational_expression > ">"': 'keyword.operator' + '">="': 'keyword.operator' + '"<="': 'keyword.operator' + '"&&"': 'keyword.operator' + '"||"': 'keyword.operator' + '"&"': 'keyword.operator' + '"|"': 'keyword.operator' + '"^"': 'keyword.operator' + '"~"': 'keyword.operator' + '"<<"': 'keyword.operator' + '">>"': 'keyword.operator' + '"="': 'keyword.operator' + '"+="': 'keyword.operator' + '"-="': 'keyword.operator' + '"*="': 'keyword.operator' + '"/="': 'keyword.operator' + '"%="': 'keyword.operator' + '"<<="': 'keyword.operator' + '">>="': 'keyword.operator' + '"&="': 'keyword.operator' + '"^="': 'keyword.operator' + '"|="': 'keyword.operator' + '"?"': 'keyword.operator' + 'conditional_expression > ":"': 'keyword.operator' diff --git a/packages/language-c/lib/main.js b/packages/language-c/lib/main.js new file mode 100644 index 000000000..56630de15 --- /dev/null +++ b/packages/language-c/lib/main.js @@ -0,0 +1,12 @@ +exports.activate = function () { + // Highlight macro bodies as C/C++ + for (const language of ['c', 'cpp']) { + for (const nodeType of ['preproc_def', 'preproc_function_def']) { + atom.grammars.addInjectionPoint(`source.${language}`, { + type: nodeType, + language (node) { return language }, + content (node) { return node.lastNamedChild } + }) + } + } +} diff --git a/packages/language-c/package-lock.json b/packages/language-c/package-lock.json new file mode 100644 index 000000000..786af1306 --- /dev/null +++ 
b/packages/language-c/package-lock.json @@ -0,0 +1,398 @@ +{ + "name": "language-c", + "version": "0.60.20", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "version": "0.60.19", + "license": "MIT", + "dependencies": { + "tree-sitter-c": "0.19.0", + "tree-sitter-cpp": "0.19.0" + }, + "devDependencies": { + "coffeelint": "^1.10.1" + }, + "engines": { + "atom": "*", + "node": "*" + } + }, + "node_modules/balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/coffee-script": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.11.1.tgz", + "integrity": "sha1-vxxHrWREOg2V0S3ysUfMCk2q1uk=", + "deprecated": "CoffeeScript on NPM has moved to \"coffeescript\" (no hyphen)", + "dev": true, + "bin": { + "cake": "bin/cake", + "coffee": "bin/coffee" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/coffeelint": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/coffeelint/-/coffeelint-1.16.2.tgz", + "integrity": "sha512-6mzgOo4zb17WfdrSui/cSUEgQ0AQkW3gXDht+6lHkfkqGUtSYKwGdGcXsDfAyuScVzTlTtKdfwkAlJWfqul7zg==", + "dev": true, + "dependencies": { + "coffee-script": "~1.11.0", + "glob": "^7.0.6", + "ignore": "^3.0.9", + "optimist": "^0.6.1", + "resolve": "^0.6.3", + "strip-json-comments": "^1.0.2" + }, + "bin": { + "coffeelint": "bin/coffeelint" + }, + "engines": { + "node": ">=0.8.0", + "npm": ">=1.3.7" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "node_modules/glob": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", + "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ignore": { + "version": "3.3.10", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", + "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==", + "dev": true + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", + "dev": true + }, + "node_modules/minimatch": { + "version": "3.0.4", + 
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "0.0.10", + "resolved": "http://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", + "integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=", + "dev": true + }, + "node_modules/nan": { + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", + "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/optimist": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", + "dev": true, + "dependencies": { + "minimist": "~0.0.1", + "wordwrap": "~0.0.2" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-0.6.3.tgz", + "integrity": "sha1-3ZV5gufnNt699TtYpN2RdUV13UY=", + "dev": true + }, + "node_modules/strip-json-comments": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-1.0.4.tgz", + "integrity": "sha1-HhX7ysl9Pumb8tc7TGVrCCu6+5E=", + "dev": true, + "bin": { + "strip-json-comments": "cli.js" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/tree-sitter-c": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/tree-sitter-c/-/tree-sitter-c-0.19.0.tgz", + "integrity": "sha512-6wlHAZRUmibYwTv4XdLhiMZ3EXYLZdvkiRIXJJ1rMCSMFwhaaLnBjyFw7aILJT1x6V7oD7I3VNOOniwKHV9osA==", + "hasInstallScript": true, + "dependencies": { + "nan": "^2.14.0" + } + }, + "node_modules/tree-sitter-cpp": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/tree-sitter-cpp/-/tree-sitter-cpp-0.19.0.tgz", + "integrity": "sha512-nCbHpC3ZaLnuvfHVtC40M5Mzc4QA11Zogz/lLUMBirysxmc9ResCN+gNMWjl7OrG4RPRSd0wQbjg9z6UTtl0SQ==", + "hasInstallScript": true, + "dependencies": { + "nan": "^2.14.0" + } + }, + "node_modules/wordwrap": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + } + }, + "dependencies": { + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": 
"sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "coffee-script": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.11.1.tgz", + "integrity": "sha1-vxxHrWREOg2V0S3ysUfMCk2q1uk=", + "dev": true + }, + "coffeelint": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/coffeelint/-/coffeelint-1.16.2.tgz", + "integrity": "sha512-6mzgOo4zb17WfdrSui/cSUEgQ0AQkW3gXDht+6lHkfkqGUtSYKwGdGcXsDfAyuScVzTlTtKdfwkAlJWfqul7zg==", + "dev": true, + "requires": { + "coffee-script": "~1.11.0", + "glob": "^7.0.6", + "ignore": "^3.0.9", + "optimist": "^0.6.1", + "resolve": "^0.6.3", + "strip-json-comments": "^1.0.2" + } + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "glob": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", + "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "ignore": { + "version": "3.3.10", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", + "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", + "dev": true + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "0.0.10", + "resolved": "http://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", + "integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=", + "dev": true + }, + "nan": { + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", + "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "optimist": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", + "dev": true, + "requires": { + "minimist": "~0.0.1", + "wordwrap": "~0.0.2" + } + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "resolve": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-0.6.3.tgz", + "integrity": "sha1-3ZV5gufnNt699TtYpN2RdUV13UY=", + "dev": true + }, + "strip-json-comments": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-1.0.4.tgz", + "integrity": "sha1-HhX7ysl9Pumb8tc7TGVrCCu6+5E=", + "dev": true + }, + "tree-sitter-c": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/tree-sitter-c/-/tree-sitter-c-0.19.0.tgz", + "integrity": "sha512-6wlHAZRUmibYwTv4XdLhiMZ3EXYLZdvkiRIXJJ1rMCSMFwhaaLnBjyFw7aILJT1x6V7oD7I3VNOOniwKHV9osA==", + "requires": { + "nan": "^2.14.0" + } + }, + "tree-sitter-cpp": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/tree-sitter-cpp/-/tree-sitter-cpp-0.19.0.tgz", + "integrity": "sha512-nCbHpC3ZaLnuvfHVtC40M5Mzc4QA11Zogz/lLUMBirysxmc9ResCN+gNMWjl7OrG4RPRSd0wQbjg9z6UTtl0SQ==", + "requires": { + "nan": "^2.14.0" + } + }, + "wordwrap": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", + "dev": true + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + } + } +} diff --git a/packages/language-c/package.json b/packages/language-c/package.json new file mode 100644 index 000000000..9feb71ed8 --- /dev/null +++ b/packages/language-c/package.json @@ -0,0 +1,29 @@ +{ + "version": "0.60.20", + "name": "language-c", + "description": "Atom language support for C/C++", + "keywords": [ + "tree-sitter" + ], + "main": "lib/main", + "homepage": "https://atom.github.io/language-c", + "repository": { + "type": "git", + "url": "https://github.com/atom/language-c.git" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/atom/language-c/issues" + }, + "engines": { + "atom": "*", + "node": "*" + }, + "dependencies": { + "tree-sitter-c": "0.19.0", + "tree-sitter-cpp": "0.19.0" + }, + "devDependencies": { + "coffeelint": "^1.10.1" + } +} diff --git a/packages/language-c/settings/language-c.cson b/packages/language-c/settings/language-c.cson new file mode 100644 index 000000000..17d33e417 --- /dev/null +++ b/packages/language-c/settings/language-c.cson @@ -0,0 +1,19 @@ +'.source.c, .source.cpp, .source.objc, .source.objcpp': + 'editor': + 'commentStart': '// ' + 'increaseIndentPattern': '(?x) + ^ .* \\{ [^}"\']* $ + |^ .* \\( [^\\)"\']* $ + |^ \\s* (public|private|protected): \\s* $ + |^ \\s* @(public|private|protected) \\s* $ + |^ \\s* \\{ \\} $ + ' + 'decreaseIndentPattern': '(?x) + ^ \\s* (\\s* /[*] .* [*]/ \\s*)* \\} + |^ \\s* (\\s* /[*] .* [*]/ \\s*)* \\) + |^ \\s* (public|private|protected): \\s* $ + |^ \\s* @(public|private|protected) \\s* $ + ' +'.source.c, .source.cpp': + 'editor': + 'foldEndPattern': '(?': + 'prefix': 'Inc' + 'body': '#include <${1:.h}>' + '#include ""': + 'prefix': 'inc' + 'body': '#include "${1:.h}"' + '#pragma mark': + 'prefix': 'mark' + 'body': '#if 0\n${1:#pragma mark -\n}#pragma mark $2\n#endif\n\n$0' + 'main()': + 'prefix': 'main' + 'body': 'int main(int argc, char const *argv[]) {\n\t${1:/* code */}\n\treturn 0;\n}' + 'For Loop': + 'prefix': 'for' + 'body': 'for (size_t ${1:i} = 0; ${1:i} < ${2:count}; ${1:i}${3:++}) {\n\t${4:/* code */}\n}' + 'Header 
Include-Guard': + 'prefix': 'once' + 'body': '#ifndef ${1:SYMBOL}\n#define $1\n\n${2}\n\n#endif /* end of include guard: $1 */\n' + 'Shared Pointer': + 'prefix': 'sp' + 'body': 'typedef std::shared_ptr<${2:${1:my_type}_t}> ${3:${4:my_type}_ptr};' + 'Typedef': + 'prefix': 'td' + 'body': 'typedef ${1:int} ${2:MyCustomType};' + 'Do While Loop': + 'prefix': 'do' + 'body': 'do {\n\t${0:/* code */}\n} while(${1:/* condition */});' + 'While Loop': + 'prefix': 'while' + 'body': 'while (${1:/* condition */}) {\n\t${2:/* code */}\n}' + 'fprintf': + 'prefix': 'fprintf' + 'body': 'fprintf(${1:stderr}, "${2:%s}\\\\n", $3);$4' + 'If Condition': + 'prefix': 'if' + 'body': 'if (${1:/* condition */}) {\n\t${2:/* code */}\n}' + 'If Else': + 'prefix': 'ife' + 'body': 'if (${1:/* condition */}) {\n\t${2:/* code */}\n} else {\n\t${3:/* code */}\n}' + 'If ElseIf': + 'prefix': 'iff' + 'body': 'if (${1:/* condition */}) {\n\t${2:/* code */}\n} else if (${3:/* condition */}) {\n\t${4:/* code */}\n}' + 'If ElseIf Else': + 'prefix': 'iffe' + 'body': 'if (${1:/* condition */}) {\n\t${2:/* code */}\n} else if (${3:/* condition */}) {\n\t${4:/* code */}\n} else {\n\t${5:/* code */}\n}' + 'Switch Statement': + 'prefix': 'switch' + 'body': 'switch (${1:/* expression */}) {\n\tcase ${2:/* value */}:\n}' + 'case': + 'prefix': 'cs' + 'body': 'case ${1:/* value */}:$0' + 'printf': + 'prefix': 'printf' + 'body': 'printf("${1:%s}\\\\n", $2);$3' + 'scanf': + 'prefix': 'scanf' + 'body': 'scanf(\"${1:%s}\\\\n\", $2);$3' + 'Struct': + 'prefix': 'st' + 'body': 'struct ${1:name_t} {\n\t${2:/* data */}\n};' + 'void': + 'prefix': 'void' + 'body': 'void ${1:name}(${2:/* arguments */}) {\n\t${3:/* code */}\n}' + 'any function': + 'prefix': 'func' + 'body': '${1:int} ${2:name}(${3:/* arguments */}) {\n\t${5:/* code */}\n\treturn ${4:0};\n}' + 'write file': + 'prefix': 'wf' + 'body': 'FILE *${1:fp};\n${1:fp} = fopen ("${2:filename.txt}","w");\nif (${1:fp}!=NULL)\n{\n\tfprintf(${1:fp},"${3:Some String\\\\n}");\n\tfclose (${1:fp});\n}' + 'read file': + 'prefix': 'rf' + 'body': 'FILE *${1:fp};\n${1:fp} = fopen ("${2:filename.txt}","r");\nif (${1:fp}!=NULL)\n{\n\tfscanf(${1:fp},"${3:Some String\\\\n}", ${3:&var});\n\tfclose (${1:fp});\n}' +'.source.cpp, .source.objcpp': + 'Enumeration': + 'prefix': 'enum' + 'body': 'enum ${1:name} { $0 };' + 'Class': + 'prefix': 'cl' + 'body': 'class ${1:name_t} {\nprivate:\n\t${0:/* data */}\n\npublic:\n\t${1:name_t} (${2:arguments});\n\tvirtual ~${1:name_t} ();\n};' + 'Namespace': + 'prefix': 'ns' + 'body': 'namespace ${1:name} {\n\t$2\n} /* $1 */' + 'cout': + 'prefix': 'cout' + 'body': 'std::cout << \"${1:/* message */}\" << \'\\\\n\';' + 'cin': + 'prefix': 'cin' + 'body': 'std::cin >> ${1:/* variable */};' + 'cerr': + 'prefix': 'cerr' + 'body': 'std::cerr << \"${1:/* error message */}\" << \'\\\\n\';' + 'std::map': + 'prefix': 'map' + 'body': 'std::map<${1:key}, ${2:value}> map$3;' + 'std::string': + 'prefix': 'str' + 'body': 'std::string' + 'std::vector': + 'prefix': 'vector' + 'body': 'std::vector<${1:int}> v$2;' + 'template ': + 'prefix': 'tp' + 'body': 'template ' + 'output file': + 'prefix': 'outf' + 'body': 'std::ofstream ${1:afile}("${2:filename.txt}", std::ios::out);\nif (${1:afile}.is_open()) {\n\t${1:afile} << "${3:This is a line.}\\\\n";\n\t${1:afile}.close();\n}' + 'input file': + 'prefix': 'inf' + 'body': 'std::ifstream ${1:afile}("${2:filename.txt}", std::ios::in);\nif (${1:afile}.is_open()) {\n\tstd::string line;\n\twhile (std::getline(${1:afile}, line)) {\n\t\tstd::cout << line << 
\'\\\\n\';\n\t}\n\t${1:afile}.close();\n}\nelse {\n\tstd::cerr << "Unable to open file\\\\n";\n}' diff --git a/packages/language-c/spec/c-spec.coffee b/packages/language-c/spec/c-spec.coffee new file mode 100644 index 000000000..01d71056a --- /dev/null +++ b/packages/language-c/spec/c-spec.coffee @@ -0,0 +1,1140 @@ +TextEditor = null +buildTextEditor = (params) -> + if atom.workspace.buildTextEditor? + atom.workspace.buildTextEditor(params) + else + TextEditor ?= require('atom').TextEditor + new TextEditor(params) + +describe "Language-C", -> + grammar = null + + beforeEach -> + atom.config.set('core.useTreeSitterParsers', false) + + waitsForPromise -> + atom.packages.activatePackage('language-c') + + describe "C", -> + beforeEach -> + grammar = atom.grammars.grammarForScopeName('source.c') + + it "parses the grammar", -> + expect(grammar).toBeTruthy() + expect(grammar.scopeName).toBe 'source.c' + + it "tokenizes punctuation", -> + {tokens} = grammar.tokenizeLine 'hi;' + expect(tokens[1]).toEqual value: ';', scopes: ['source.c', 'punctuation.terminator.statement.c'] + + {tokens} = grammar.tokenizeLine 'a[b]' + expect(tokens[1]).toEqual value: '[', scopes: ['source.c', 'punctuation.definition.begin.bracket.square.c'] + expect(tokens[3]).toEqual value: ']', scopes: ['source.c', 'punctuation.definition.end.bracket.square.c'] + + {tokens} = grammar.tokenizeLine 'a, b' + expect(tokens[1]).toEqual value: ',', scopes: ['source.c', 'punctuation.separator.delimiter.c'] + + it "tokenizes functions", -> + lines = grammar.tokenizeLines ''' + int something(int param) { + return 0; + } + ''' + expect(lines[0][0]).toEqual value: 'int', scopes: ['source.c', 'storage.type.c'] + expect(lines[0][2]).toEqual value: 'something', scopes: ['source.c', 'meta.function.c', 'entity.name.function.c'] + expect(lines[0][3]).toEqual value: '(', scopes: ['source.c', 'meta.function.c', 'punctuation.section.parameters.begin.bracket.round.c'] + expect(lines[0][4]).toEqual value: 'int', scopes: ['source.c', 'meta.function.c', 'storage.type.c'] + expect(lines[0][6]).toEqual value: ')', scopes: ['source.c', 'meta.function.c', 'punctuation.section.parameters.end.bracket.round.c'] + expect(lines[0][8]).toEqual value: '{', scopes: ['source.c', 'meta.block.c', 'punctuation.section.block.begin.bracket.curly.c'] + expect(lines[1][1]).toEqual value: 'return', scopes: ['source.c', 'meta.block.c', 'keyword.control.c'] + expect(lines[1][3]).toEqual value: '0', scopes: ['source.c', 'meta.block.c', 'constant.numeric.c'] + expect(lines[2][0]).toEqual value: '}', scopes: ['source.c', 'meta.block.c', 'punctuation.section.block.end.bracket.curly.c'] + + it "tokenizes varargs ellipses", -> + {tokens} = grammar.tokenizeLine 'void function(...);' + expect(tokens[0]).toEqual value: 'void', scopes: ['source.c', 'storage.type.c'] + expect(tokens[2]).toEqual value: 'function', scopes: ['source.c', 'meta.function.c', 'entity.name.function.c'] + expect(tokens[3]).toEqual value: '(', scopes: ['source.c', 'meta.function.c', 'punctuation.section.parameters.begin.bracket.round.c'] + expect(tokens[4]).toEqual value: '...', scopes: ['source.c', 'meta.function.c', 'punctuation.vararg-ellipses.c'] + expect(tokens[5]).toEqual value: ')', scopes: ['source.c', 'meta.function.c', 'punctuation.section.parameters.end.bracket.round.c'] + + it "tokenizes various _t types", -> + {tokens} = grammar.tokenizeLine 'size_t var;' + expect(tokens[0]).toEqual value: 'size_t', scopes: ['source.c', 'support.type.sys-types.c'] + + {tokens} = grammar.tokenizeLine 'pthread_t var;' 
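+ # pthread_* identifiers are scoped as support.type.pthread.c; other unrecognized *_t names fall back to support.type.posix-reserved.c (asserted below)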
+ expect(tokens[0]).toEqual value: 'pthread_t', scopes: ['source.c', 'support.type.pthread.c'] + + {tokens} = grammar.tokenizeLine 'int32_t var;' + expect(tokens[0]).toEqual value: 'int32_t', scopes: ['source.c', 'support.type.stdint.c'] + + {tokens} = grammar.tokenizeLine 'myType_t var;' + expect(tokens[0]).toEqual value: 'myType_t', scopes: ['source.c', 'support.type.posix-reserved.c'] + + it "tokenizes 'line continuation' character", -> + {tokens} = grammar.tokenizeLine 'ma' + '\\' + '\n' + 'in(){};' + expect(tokens[0]).toEqual value: 'ma', scopes: ['source.c'] + expect(tokens[1]).toEqual value: '\\', scopes: ['source.c', 'constant.character.escape.line-continuation.c'] + expect(tokens[3]).toEqual value: 'in', scopes: ['source.c', 'meta.function.c', 'entity.name.function.c'] + + describe "strings", -> + it "tokenizes them", -> + delimsByScope = + 'string.quoted.double.c': '"' + 'string.quoted.single.c': '\'' + + for scope, delim of delimsByScope + {tokens} = grammar.tokenizeLine delim + 'a' + delim + expect(tokens[0]).toEqual value: delim, scopes: ['source.c', scope, 'punctuation.definition.string.begin.c'] + expect(tokens[1]).toEqual value: 'a', scopes: ['source.c', scope] + expect(tokens[2]).toEqual value: delim, scopes: ['source.c', scope, 'punctuation.definition.string.end.c'] + + {tokens} = grammar.tokenizeLine delim + 'a' + '\\' + '\n' + 'b' + delim + expect(tokens[0]).toEqual value: delim, scopes: ['source.c', scope, 'punctuation.definition.string.begin.c'] + expect(tokens[1]).toEqual value: 'a', scopes: ['source.c', scope] + expect(tokens[2]).toEqual value: '\\', scopes: ['source.c', scope, 'constant.character.escape.line-continuation.c'] + expect(tokens[4]).toEqual value: 'b', scopes: ['source.c', scope] + expect(tokens[5]).toEqual value: delim, scopes: ['source.c', scope, 'punctuation.definition.string.end.c'] + + {tokens} = grammar.tokenizeLine '"%d"' + expect(tokens[0]).toEqual value: '"', scopes: ['source.c', 'string.quoted.double.c', 'punctuation.definition.string.begin.c'] + expect(tokens[1]).toEqual value: '%d', scopes: ['source.c', 'string.quoted.double.c', 'constant.other.placeholder.c'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.c', 'string.quoted.double.c', 'punctuation.definition.string.end.c'] + + {tokens} = grammar.tokenizeLine '"%"' + expect(tokens[0]).toEqual value: '"', scopes: ['source.c', 'string.quoted.double.c', 'punctuation.definition.string.begin.c'] + expect(tokens[1]).toEqual value: '%', scopes: ['source.c', 'string.quoted.double.c', 'invalid.illegal.placeholder.c'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.c', 'string.quoted.double.c', 'punctuation.definition.string.end.c'] + + {tokens} = grammar.tokenizeLine '"%" PRId32' + expect(tokens[0]).toEqual value: '"', scopes: ['source.c', 'string.quoted.double.c', 'punctuation.definition.string.begin.c'] + expect(tokens[1]).toEqual value: '%', scopes: ['source.c', 'string.quoted.double.c'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.c', 'string.quoted.double.c', 'punctuation.definition.string.end.c'] + + {tokens} = grammar.tokenizeLine '"%" SCNd32' + expect(tokens[0]).toEqual value: '"', scopes: ['source.c', 'string.quoted.double.c', 'punctuation.definition.string.begin.c'] + expect(tokens[1]).toEqual value: '%', scopes: ['source.c', 'string.quoted.double.c'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.c', 'string.quoted.double.c', 'punctuation.definition.string.end.c'] + + describe "comments", -> + it "tokenizes them", -> + {tokens} = 
grammar.tokenizeLine '/**/' + expect(tokens[0]).toEqual value: '/*', scopes: ['source.c', 'comment.block.c', 'punctuation.definition.comment.begin.c'] + expect(tokens[1]).toEqual value: '*/', scopes: ['source.c', 'comment.block.c', 'punctuation.definition.comment.end.c'] + + {tokens} = grammar.tokenizeLine '/* foo */' + expect(tokens[0]).toEqual value: '/*', scopes: ['source.c', 'comment.block.c', 'punctuation.definition.comment.begin.c'] + expect(tokens[1]).toEqual value: ' foo ', scopes: ['source.c', 'comment.block.c'] + expect(tokens[2]).toEqual value: '*/', scopes: ['source.c', 'comment.block.c', 'punctuation.definition.comment.end.c'] + + {tokens} = grammar.tokenizeLine '*/*' + expect(tokens[0]).toEqual value: '*/*', scopes: ['source.c', 'invalid.illegal.stray-comment-end.c'] + + describe "preprocessor directives", -> + it "tokenizes '#line'", -> + {tokens} = grammar.tokenizeLine '#line 151 "copy.c"' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.line.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'line', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.line.c'] + expect(tokens[3]).toEqual value: '151', scopes: ['source.c', 'meta.preprocessor.c', 'constant.numeric.c'] + expect(tokens[5]).toEqual value: '"', scopes: ['source.c', 'meta.preprocessor.c', 'string.quoted.double.c', 'punctuation.definition.string.begin.c'] + expect(tokens[6]).toEqual value: 'copy.c', scopes: ['source.c', 'meta.preprocessor.c', 'string.quoted.double.c'] + expect(tokens[7]).toEqual value: '"', scopes: ['source.c', 'meta.preprocessor.c', 'string.quoted.double.c', 'punctuation.definition.string.end.c'] + + it "tokenizes '#undef'", -> + {tokens} = grammar.tokenizeLine '#undef FOO' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.undef.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'undef', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.undef.c'] + expect(tokens[2]).toEqual value: ' ', scopes: ['source.c', 'meta.preprocessor.c'] + expect(tokens[3]).toEqual value: 'FOO', scopes: ['source.c', 'meta.preprocessor.c', 'entity.name.function.preprocessor.c'] + + it "tokenizes '#pragma'", -> + {tokens} = grammar.tokenizeLine '#pragma once' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.pragma.c', 'keyword.control.directive.pragma.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'pragma', scopes: ['source.c', 'meta.preprocessor.pragma.c', 'keyword.control.directive.pragma.c'] + expect(tokens[2]).toEqual value: ' ', scopes: ['source.c', 'meta.preprocessor.pragma.c'] + expect(tokens[3]).toEqual value: 'once', scopes: ['source.c', 'meta.preprocessor.pragma.c', 'entity.other.attribute-name.pragma.preprocessor.c'] + + {tokens} = grammar.tokenizeLine '#pragma clang diagnostic ignored "-Wunused-variable"' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.pragma.c', 'keyword.control.directive.pragma.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'pragma', scopes: ['source.c', 'meta.preprocessor.pragma.c', 'keyword.control.directive.pragma.c'] + expect(tokens[2]).toEqual value: ' ', scopes: ['source.c', 'meta.preprocessor.pragma.c'] + expect(tokens[3]).toEqual value: 'clang', scopes: ['source.c', 'meta.preprocessor.pragma.c', 'entity.other.attribute-name.pragma.preprocessor.c'] + 
expect(tokens[5]).toEqual value: 'diagnostic', scopes: ['source.c', 'meta.preprocessor.pragma.c', 'entity.other.attribute-name.pragma.preprocessor.c'] + expect(tokens[7]).toEqual value: 'ignored', scopes: ['source.c', 'meta.preprocessor.pragma.c', 'entity.other.attribute-name.pragma.preprocessor.c'] + expect(tokens[10]).toEqual value: '-Wunused-variable', scopes: ['source.c', 'meta.preprocessor.pragma.c', 'string.quoted.double.c'] + + {tokens} = grammar.tokenizeLine '#pragma mark – Initialization' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.section', 'meta.preprocessor.pragma.c', 'keyword.control.directive.pragma.pragma-mark.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'pragma mark', scopes: ['source.c', 'meta.section', 'meta.preprocessor.pragma.c', 'keyword.control.directive.pragma.pragma-mark.c'] + expect(tokens[3]).toEqual value: '– Initialization', scopes: ['source.c', 'meta.section', 'meta.preprocessor.pragma.c', 'entity.name.tag.pragma-mark.c'] + + describe "define", -> + it "tokenizes '#define [identifier name]'", -> + {tokens} = grammar.tokenizeLine '#define _FILE_NAME_H_' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'define', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c'] + expect(tokens[3]).toEqual value: '_FILE_NAME_H_', scopes: ['source.c', 'meta.preprocessor.macro.c', 'entity.name.function.preprocessor.c'] + + it "tokenizes '#define [identifier name] [value]'", -> + {tokens} = grammar.tokenizeLine '#define WIDTH 80' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'define', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c'] + expect(tokens[3]).toEqual value: 'WIDTH', scopes: ['source.c', 'meta.preprocessor.macro.c', 'entity.name.function.preprocessor.c'] + expect(tokens[5]).toEqual value: '80', scopes: ['source.c', 'meta.preprocessor.macro.c', 'constant.numeric.c'] + + {tokens} = grammar.tokenizeLine '#define ABC XYZ(1)' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'define', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c'] + expect(tokens[3]).toEqual value: 'ABC', scopes: ['source.c', 'meta.preprocessor.macro.c', 'entity.name.function.preprocessor.c'] + expect(tokens[4]).toEqual value: ' ', scopes: ['source.c', 'meta.preprocessor.macro.c'] + expect(tokens[5]).toEqual value: 'XYZ', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.function.c', 'entity.name.function.c'] + expect(tokens[6]).toEqual value: '(', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.function.c', 'punctuation.section.arguments.begin.bracket.round.c'] + expect(tokens[7]).toEqual value: '1', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.function.c', 'constant.numeric.c'] + expect(tokens[8]).toEqual value: ')', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.function.c', 'punctuation.section.arguments.end.bracket.round.c'] + + {tokens} = grammar.tokenizeLine '#define PI_PLUS_ONE (3.14 + 1)' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 
'meta.preprocessor.macro.c', 'keyword.control.directive.define.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'define', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c'] + expect(tokens[3]).toEqual value: 'PI_PLUS_ONE', scopes: ['source.c', 'meta.preprocessor.macro.c', 'entity.name.function.preprocessor.c'] + expect(tokens[4]).toEqual value: ' ', scopes: ['source.c', 'meta.preprocessor.macro.c'] + expect(tokens[5]).toEqual value: '(', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.section.parens.begin.bracket.round.c'] + expect(tokens[6]).toEqual value: '3.14', scopes: ['source.c', 'meta.preprocessor.macro.c', 'constant.numeric.c'] + expect(tokens[8]).toEqual value: '+', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.operator.c'] + expect(tokens[10]).toEqual value: '1', scopes: ['source.c', 'meta.preprocessor.macro.c', 'constant.numeric.c'] + expect(tokens[11]).toEqual value: ')', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.section.parens.end.bracket.round.c'] + + describe "macros", -> + it "tokenizes them", -> + {tokens} = grammar.tokenizeLine '#define INCREMENT(x) x++' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'define', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c'] + expect(tokens[3]).toEqual value: 'INCREMENT', scopes: ['source.c', 'meta.preprocessor.macro.c', 'entity.name.function.preprocessor.c'] + expect(tokens[4]).toEqual value: '(', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.definition.parameters.begin.c'] + expect(tokens[5]).toEqual value: 'x', scopes: ['source.c', 'meta.preprocessor.macro.c', 'variable.parameter.preprocessor.c'] + expect(tokens[6]).toEqual value: ')', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.definition.parameters.end.c'] + expect(tokens[7]).toEqual value: ' x', scopes: ['source.c', 'meta.preprocessor.macro.c'] + expect(tokens[8]).toEqual value: '++', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.operator.increment.c'] + + {tokens} = grammar.tokenizeLine '#define MULT(x, y) (x) * (y)' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'define', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c'] + expect(tokens[3]).toEqual value: 'MULT', scopes: ['source.c', 'meta.preprocessor.macro.c', 'entity.name.function.preprocessor.c'] + expect(tokens[4]).toEqual value: '(', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.definition.parameters.begin.c'] + expect(tokens[5]).toEqual value: 'x', scopes: ['source.c', 'meta.preprocessor.macro.c', 'variable.parameter.preprocessor.c'] + expect(tokens[6]).toEqual value: ',', scopes: ['source.c', 'meta.preprocessor.macro.c', 'variable.parameter.preprocessor.c', 'punctuation.separator.parameters.c'] + expect(tokens[7]).toEqual value: ' y', scopes: ['source.c', 'meta.preprocessor.macro.c', 'variable.parameter.preprocessor.c'] + expect(tokens[8]).toEqual value: ')', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.definition.parameters.end.c'] + expect(tokens[9]).toEqual value: ' ', scopes: ['source.c', 'meta.preprocessor.macro.c'] + expect(tokens[10]).toEqual 
value: '(', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.section.parens.begin.bracket.round.c'] + expect(tokens[11]).toEqual value: 'x', scopes: ['source.c', 'meta.preprocessor.macro.c'] + expect(tokens[12]).toEqual value: ')', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.section.parens.end.bracket.round.c'] + expect(tokens[13]).toEqual value: ' ', scopes: ['source.c', 'meta.preprocessor.macro.c'] + expect(tokens[14]).toEqual value: '*', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.operator.c'] + expect(tokens[15]).toEqual value: ' ', scopes: ['source.c', 'meta.preprocessor.macro.c'] + expect(tokens[16]).toEqual value: '(', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.section.parens.begin.bracket.round.c'] + expect(tokens[17]).toEqual value: 'y', scopes: ['source.c', 'meta.preprocessor.macro.c'] + expect(tokens[18]).toEqual value: ')', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.section.parens.end.bracket.round.c'] + + {tokens} = grammar.tokenizeLine '#define SWAP(a, b) do { a ^= b; b ^= a; a ^= b; } while ( 0 )' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'define', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c'] + expect(tokens[3]).toEqual value: 'SWAP', scopes: ['source.c', 'meta.preprocessor.macro.c', 'entity.name.function.preprocessor.c'] + expect(tokens[4]).toEqual value: '(', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.definition.parameters.begin.c'] + expect(tokens[5]).toEqual value: 'a', scopes: ['source.c', 'meta.preprocessor.macro.c', 'variable.parameter.preprocessor.c'] + expect(tokens[6]).toEqual value: ',', scopes: ['source.c', 'meta.preprocessor.macro.c', 'variable.parameter.preprocessor.c', 'punctuation.separator.parameters.c'] + expect(tokens[7]).toEqual value: ' b', scopes: ['source.c', 'meta.preprocessor.macro.c', 'variable.parameter.preprocessor.c'] + expect(tokens[8]).toEqual value: ')', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.definition.parameters.end.c'] + expect(tokens[10]).toEqual value: 'do', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.c'] + expect(tokens[12]).toEqual value: '{', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.section.block.begin.bracket.curly.c'] + expect(tokens[13]).toEqual value: ' a ', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c'] + expect(tokens[14]).toEqual value: '^=', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'keyword.operator.assignment.compound.bitwise.c'] + expect(tokens[15]).toEqual value: ' b', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c'] + expect(tokens[16]).toEqual value: ';', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.terminator.statement.c'] + expect(tokens[17]).toEqual value: ' b ', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c'] + expect(tokens[18]).toEqual value: '^=', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'keyword.operator.assignment.compound.bitwise.c'] + expect(tokens[19]).toEqual value: ' a', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c'] + expect(tokens[20]).toEqual value: ';', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.terminator.statement.c'] + 
expect(tokens[21]).toEqual value: ' a ', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c'] + expect(tokens[22]).toEqual value: '^=', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'keyword.operator.assignment.compound.bitwise.c'] + expect(tokens[23]).toEqual value: ' b', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c'] + expect(tokens[24]).toEqual value: ';', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.terminator.statement.c'] + expect(tokens[25]).toEqual value: ' ', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c'] + expect(tokens[26]).toEqual value: '}', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.section.block.end.bracket.curly.c'] + expect(tokens[28]).toEqual value: 'while', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.c'] + expect(tokens[29]).toEqual value: ' ', scopes: ['source.c', 'meta.preprocessor.macro.c'] + expect(tokens[30]).toEqual value: '(', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.section.parens.begin.bracket.round.c'] + expect(tokens[32]).toEqual value: '0', scopes: ['source.c', 'meta.preprocessor.macro.c', 'constant.numeric.c'] + expect(tokens[34]).toEqual value: ')', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.section.parens.end.bracket.round.c'] + + it "tokenizes multiline macros", -> + lines = grammar.tokenizeLines ''' + #define max(a,b) (a>b)? \\ + a:b + ''' + expect(lines[0][17]).toEqual value: '\\', scopes: ['source.c', 'meta.preprocessor.macro.c', 'constant.character.escape.line-continuation.c'] + expect(lines[1][0]).toEqual value: ' a', scopes: ['source.c', 'meta.preprocessor.macro.c'] + + lines = grammar.tokenizeLines ''' + #define SWAP(a, b) { \\ + a ^= b; \\ + b ^= a; \\ + a ^= b; \\ + } + ''' + expect(lines[0][0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c', 'punctuation.definition.directive.c'] + expect(lines[0][1]).toEqual value: 'define', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c'] + expect(lines[0][3]).toEqual value: 'SWAP', scopes: ['source.c', 'meta.preprocessor.macro.c', 'entity.name.function.preprocessor.c'] + expect(lines[0][4]).toEqual value: '(', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.definition.parameters.begin.c'] + expect(lines[0][5]).toEqual value: 'a', scopes: ['source.c', 'meta.preprocessor.macro.c', 'variable.parameter.preprocessor.c'] + expect(lines[0][6]).toEqual value: ',', scopes: ['source.c', 'meta.preprocessor.macro.c', 'variable.parameter.preprocessor.c', 'punctuation.separator.parameters.c'] + expect(lines[0][7]).toEqual value: ' b', scopes: ['source.c', 'meta.preprocessor.macro.c', 'variable.parameter.preprocessor.c'] + expect(lines[0][8]).toEqual value: ')', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.definition.parameters.end.c'] + expect(lines[0][10]).toEqual value: '{', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.section.block.begin.bracket.curly.c'] + expect(lines[0][12]).toEqual value: '\\', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'constant.character.escape.line-continuation.c'] + expect(lines[1][1]).toEqual value: '^=', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'keyword.operator.assignment.compound.bitwise.c'] + expect(lines[1][5]).toEqual value: '\\', scopes: ['source.c', 
'meta.preprocessor.macro.c', 'meta.block.c', 'constant.character.escape.line-continuation.c'] + expect(lines[2][1]).toEqual value: '^=', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'keyword.operator.assignment.compound.bitwise.c'] + expect(lines[2][5]).toEqual value: '\\', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'constant.character.escape.line-continuation.c'] + expect(lines[3][1]).toEqual value: '^=', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'keyword.operator.assignment.compound.bitwise.c'] + expect(lines[3][5]).toEqual value: '\\', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'constant.character.escape.line-continuation.c'] + expect(lines[4][0]).toEqual value: '}', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.section.block.end.bracket.curly.c'] + + it "tokenizes complex definitions", -> + lines = grammar.tokenizeLines ''' + #define MakeHook(name) struct HOOK name = {{false, 0L}, \\ + ((HOOKF)(*HookEnt)), ID("hook")} + ''' + expect(lines[0][0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c', 'punctuation.definition.directive.c'] + expect(lines[0][1]).toEqual value: 'define', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c'] + expect(lines[0][3]).toEqual value: 'MakeHook', scopes: ['source.c', 'meta.preprocessor.macro.c', 'entity.name.function.preprocessor.c'] + expect(lines[0][4]).toEqual value: '(', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.definition.parameters.begin.c'] + expect(lines[0][5]).toEqual value: 'name', scopes: ['source.c', 'meta.preprocessor.macro.c', 'variable.parameter.preprocessor.c'] + expect(lines[0][6]).toEqual value: ')', scopes: ['source.c', 'meta.preprocessor.macro.c', 'punctuation.definition.parameters.end.c'] + expect(lines[0][8]).toEqual value: 'struct', scopes: ['source.c', 'meta.preprocessor.macro.c', 'storage.type.c'] + expect(lines[0][10]).toEqual value: '=', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.operator.assignment.c'] + expect(lines[0][12]).toEqual value: '{', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.section.block.begin.bracket.curly.c'] + expect(lines[0][13]).toEqual value: '{', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.section.block.begin.bracket.curly.c'] + expect(lines[0][14]).toEqual value: 'false', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'constant.language.c'] + expect(lines[0][15]).toEqual value: ',', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.separator.delimiter.c'] + expect(lines[0][17]).toEqual value: '0L', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'constant.numeric.c'] + expect(lines[0][18]).toEqual value: '}', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.section.block.end.bracket.curly.c'] + expect(lines[0][19]).toEqual value: ',', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.separator.delimiter.c'] + expect(lines[0][21]).toEqual value: '\\', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'constant.character.escape.line-continuation.c'] + expect(lines[1][0]).toEqual value: '(', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.section.parens.begin.bracket.round.c'] + expect(lines[1][1]).toEqual value: '(', 
scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.section.parens.begin.bracket.round.c'] + expect(lines[1][3]).toEqual value: ')', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.section.parens.end.bracket.round.c'] + expect(lines[1][4]).toEqual value: '(', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.section.parens.begin.bracket.round.c'] + expect(lines[1][5]).toEqual value: '*', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'keyword.operator.c'] + expect(lines[1][7]).toEqual value: ')', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.section.parens.end.bracket.round.c'] + expect(lines[1][8]).toEqual value: ')', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.section.parens.end.bracket.round.c'] + expect(lines[1][9]).toEqual value: ',', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.separator.delimiter.c'] + expect(lines[1][11]).toEqual value: 'ID', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'meta.function.c', 'entity.name.function.c'] + expect(lines[1][12]).toEqual value: '(', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'meta.function.c', 'punctuation.section.arguments.begin.bracket.round.c'] + expect(lines[1][13]).toEqual value: '"', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'meta.function.c', 'string.quoted.double.c', "punctuation.definition.string.begin.c"] + expect(lines[1][14]).toEqual value: 'hook', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'meta.function.c', 'string.quoted.double.c'] + expect(lines[1][15]).toEqual value: '"', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'meta.function.c', 'string.quoted.double.c', "punctuation.definition.string.end.c"] + expect(lines[1][16]).toEqual value: ')', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'meta.function.c', 'punctuation.section.arguments.end.bracket.round.c'] + expect(lines[1][17]).toEqual value: '}', scopes: ['source.c', 'meta.preprocessor.macro.c', 'meta.block.c', 'punctuation.section.block.end.bracket.curly.c'] + + describe "includes", -> + it "tokenizes '#include'", -> + {tokens} = grammar.tokenizeLine '#include ' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.include.c', 'keyword.control.directive.include.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'include', scopes: ['source.c', 'meta.preprocessor.include.c', 'keyword.control.directive.include.c'] + expect(tokens[3]).toEqual value: '<', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.other.lt-gt.include.c', 'punctuation.definition.string.begin.c'] + expect(tokens[4]).toEqual value: 'stdio.h', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.other.lt-gt.include.c'] + expect(tokens[5]).toEqual value: '>', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.other.lt-gt.include.c', 'punctuation.definition.string.end.c'] + + {tokens} = grammar.tokenizeLine '#include' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.include.c', 'keyword.control.directive.include.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'include', scopes: ['source.c', 'meta.preprocessor.include.c', 'keyword.control.directive.include.c'] + expect(tokens[2]).toEqual value: '<', scopes: ['source.c', 
'meta.preprocessor.include.c', 'string.quoted.other.lt-gt.include.c', 'punctuation.definition.string.begin.c'] + expect(tokens[3]).toEqual value: 'stdio.h', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.other.lt-gt.include.c'] + expect(tokens[4]).toEqual value: '>', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.other.lt-gt.include.c', 'punctuation.definition.string.end.c'] + + {tokens} = grammar.tokenizeLine '#include_' + expect(tokens[0]).toEqual value: '#include_', scopes: ['source.c'] + + {tokens} = grammar.tokenizeLine '#include "file"' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.include.c', 'keyword.control.directive.include.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'include', scopes: ['source.c', 'meta.preprocessor.include.c', 'keyword.control.directive.include.c'] + expect(tokens[3]).toEqual value: '"', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.double.include.c', 'punctuation.definition.string.begin.c'] + expect(tokens[4]).toEqual value: 'file', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.double.include.c'] + expect(tokens[5]).toEqual value: '"', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.double.include.c', 'punctuation.definition.string.end.c'] + + it "tokenizes '#import'", -> + {tokens} = grammar.tokenizeLine '#import "file"' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.include.c', 'keyword.control.directive.import.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'import', scopes: ['source.c', 'meta.preprocessor.include.c', 'keyword.control.directive.import.c'] + expect(tokens[3]).toEqual value: '"', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.double.include.c', 'punctuation.definition.string.begin.c'] + expect(tokens[4]).toEqual value: 'file', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.double.include.c'] + expect(tokens[5]).toEqual value: '"', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.double.include.c', 'punctuation.definition.string.end.c'] + + it "tokenizes '#include_next'", -> + {tokens} = grammar.tokenizeLine '#include_next "next.h"' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.include.c', 'keyword.control.directive.include_next.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'include_next', scopes: ['source.c', 'meta.preprocessor.include.c', 'keyword.control.directive.include_next.c'] + expect(tokens[3]).toEqual value: '"', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.double.include.c', 'punctuation.definition.string.begin.c'] + expect(tokens[4]).toEqual value: 'next.h', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.double.include.c'] + expect(tokens[5]).toEqual value: '"', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.double.include.c', 'punctuation.definition.string.end.c'] + + describe "diagnostics", -> + it "tokenizes '#error'", -> + {tokens} = grammar.tokenizeLine '#error "C++ compiler required."' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.diagnostic.c', 'keyword.control.directive.diagnostic.error.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'error', scopes: ['source.c', 'meta.preprocessor.diagnostic.c', 'keyword.control.directive.diagnostic.error.c'] + 
expect(tokens[4]).toEqual value: 'C++ compiler required.', scopes: ['source.c', 'meta.preprocessor.diagnostic.c', 'string.quoted.double.c'] + + it "tokenizes '#warning'", -> + {tokens} = grammar.tokenizeLine '#warning "This is a warning."' + expect(tokens[0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.diagnostic.c', 'keyword.control.directive.diagnostic.warning.c', 'punctuation.definition.directive.c'] + expect(tokens[1]).toEqual value: 'warning', scopes: ['source.c', 'meta.preprocessor.diagnostic.c', 'keyword.control.directive.diagnostic.warning.c'] + expect(tokens[4]).toEqual value: 'This is a warning.', scopes: ['source.c', 'meta.preprocessor.diagnostic.c', 'string.quoted.double.c'] + + describe "conditionals", -> + it "tokenizes if-elif-else preprocessor blocks", -> + lines = grammar.tokenizeLines ''' + #if defined(CREDIT) + credit(); + #elif defined(DEBIT) + debit(); + #else + printerror(); + #endif + ''' + expect(lines[0][0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[0][1]).toEqual value: 'if', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[0][3]).toEqual value: 'defined', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[0][5]).toEqual value: 'CREDIT', scopes: ['source.c', 'meta.preprocessor.c', 'entity.name.function.preprocessor.c'] + expect(lines[1][1]).toEqual value: 'credit', scopes: ['source.c', 'meta.function.c', 'entity.name.function.c'] + expect(lines[1][2]).toEqual value: '(', scopes: ['source.c', 'meta.function.c', 'punctuation.section.parameters.begin.bracket.round.c'] + expect(lines[1][3]).toEqual value: ')', scopes: ['source.c', 'meta.function.c', 'punctuation.section.parameters.end.bracket.round.c'] + expect(lines[2][0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[2][1]).toEqual value: 'elif', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[2][3]).toEqual value: 'defined', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[2][5]).toEqual value: 'DEBIT', scopes: ['source.c', 'meta.preprocessor.c', 'entity.name.function.preprocessor.c'] + expect(lines[3][1]).toEqual value: 'debit', scopes: ['source.c', 'meta.function.c', 'entity.name.function.c'] + expect(lines[3][2]).toEqual value: '(', scopes: ['source.c', 'meta.function.c', 'punctuation.section.parameters.begin.bracket.round.c'] + expect(lines[3][3]).toEqual value: ')', scopes: ['source.c', 'meta.function.c', 'punctuation.section.parameters.end.bracket.round.c'] + expect(lines[4][0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[4][1]).toEqual value: 'else', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[5][1]).toEqual value: 'printerror', scopes: ['source.c', 'meta.function.c', 'entity.name.function.c'] + expect(lines[5][2]).toEqual value: '(', scopes: ['source.c', 'meta.function.c', 'punctuation.section.parameters.begin.bracket.round.c'] + expect(lines[5][3]).toEqual value: ')', scopes: ['source.c', 'meta.function.c', 'punctuation.section.parameters.end.bracket.round.c'] + 
expect(lines[6][0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[6][1]).toEqual value: 'endif', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + + it "tokenizes if-true-else blocks", -> + lines = grammar.tokenizeLines ''' + #if 1 + int something() { + #if 1 + return 1; + #else + return 0; + #endif + } + #else + int something() { + return 0; + } + #endif + ''' + expect(lines[0][0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[0][1]).toEqual value: 'if', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[0][3]).toEqual value: '1', scopes: ['source.c', 'meta.preprocessor.c', 'constant.numeric.c'] + expect(lines[1][0]).toEqual value: 'int', scopes: ['source.c', 'storage.type.c'] + expect(lines[1][2]).toEqual value: 'something', scopes: ['source.c', 'meta.function.c', 'entity.name.function.c'] + expect(lines[2][1]).toEqual value: '#', scopes: ['source.c', 'meta.block.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[2][2]).toEqual value: 'if', scopes: ['source.c', 'meta.block.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[2][4]).toEqual value: '1', scopes: ['source.c', 'meta.block.c', 'meta.preprocessor.c', 'constant.numeric.c'] + expect(lines[3][1]).toEqual value: 'return', scopes: ['source.c', 'meta.block.c', 'keyword.control.c'] + expect(lines[3][3]).toEqual value: '1', scopes: ['source.c', 'meta.block.c', 'constant.numeric.c'] + expect(lines[4][1]).toEqual value: '#', scopes: ['source.c', 'meta.block.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[4][2]).toEqual value: 'else', scopes: ['source.c', 'meta.block.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[5][0]).toEqual value: ' return 0;', scopes: ['source.c', 'meta.block.c', 'comment.block.preprocessor.else-branch.in-block.c'] + expect(lines[6][1]).toEqual value: '#', scopes: ['source.c', 'meta.block.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[6][2]).toEqual value: 'endif', scopes: ['source.c', 'meta.block.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[8][0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[8][1]).toEqual value: 'else', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[9][0]).toEqual value: 'int something() {', scopes: ['source.c', 'comment.block.preprocessor.else-branch.c'] + expect(lines[12][0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[12][1]).toEqual value: 'endif', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + + it "tokenizes if-false-else blocks", -> + lines = grammar.tokenizeLines ''' + int something() { + #if 0 + return 1; + #else + return 0; + #endif + } + ''' + expect(lines[0][0]).toEqual value: 'int', scopes: ['source.c', 'storage.type.c'] + 
expect(lines[0][2]).toEqual value: 'something', scopes: ['source.c', 'meta.function.c', 'entity.name.function.c'] + expect(lines[1][1]).toEqual value: '#', scopes: ['source.c', 'meta.block.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[1][2]).toEqual value: 'if', scopes: ['source.c', 'meta.block.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[1][4]).toEqual value: '0', scopes: ['source.c', 'meta.block.c', 'meta.preprocessor.c', 'constant.numeric.c'] + expect(lines[2][0]).toEqual value: ' return 1;', scopes: ['source.c', 'meta.block.c', 'comment.block.preprocessor.if-branch.in-block.c'] + expect(lines[3][1]).toEqual value: '#', scopes: ['source.c', 'meta.block.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[3][2]).toEqual value: 'else', scopes: ['source.c', 'meta.block.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[4][1]).toEqual value: 'return', scopes: ['source.c', 'meta.block.c', 'keyword.control.c'] + expect(lines[4][3]).toEqual value: '0', scopes: ['source.c', 'meta.block.c', 'constant.numeric.c'] + expect(lines[5][1]).toEqual value: '#', scopes: ['source.c', 'meta.block.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[5][2]).toEqual value: 'endif', scopes: ['source.c', 'meta.block.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + + lines = grammar.tokenizeLines ''' + #if 0 + something(); + #endif + ''' + expect(lines[0][0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[0][1]).toEqual value: 'if', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[0][3]).toEqual value: '0', scopes: ['source.c', 'meta.preprocessor.c', 'constant.numeric.c'] + expect(lines[1][0]).toEqual value: ' something();', scopes: ['source.c', 'comment.block.preprocessor.if-branch.c'] + expect(lines[2][0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[2][1]).toEqual value: 'endif', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + + it "tokenizes ifdef-elif blocks", -> + lines = grammar.tokenizeLines ''' + #ifdef __unix__ /* is defined by compilers targeting Unix systems */ + # include <unistd.h> + #elif defined _WIN32 /* is defined by compilers targeting Windows systems */ + # include <windows.h> + #endif + ''' + expect(lines[0][0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[0][1]).toEqual value: 'ifdef', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[0][3]).toEqual value: '__unix__', scopes: ['source.c', 'meta.preprocessor.c', 'entity.name.function.preprocessor.c'] + expect(lines[0][5]).toEqual value: '/*', scopes: ['source.c', 'comment.block.c', 'punctuation.definition.comment.begin.c'] + expect(lines[0][6]).toEqual value: ' is defined by compilers targeting Unix systems ', scopes: ['source.c', 'comment.block.c'] + expect(lines[0][7]).toEqual value: '*/', scopes: ['source.c', 'comment.block.c', 'punctuation.definition.comment.end.c'] + 
expect(lines[1][1]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.include.c', 'keyword.control.directive.include.c', 'punctuation.definition.directive.c'] + expect(lines[1][2]).toEqual value: ' include', scopes: ['source.c', 'meta.preprocessor.include.c', 'keyword.control.directive.include.c'] + expect(lines[1][4]).toEqual value: '<', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.other.lt-gt.include.c', 'punctuation.definition.string.begin.c'] + expect(lines[1][5]).toEqual value: 'unistd.h', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.other.lt-gt.include.c'] + expect(lines[1][6]).toEqual value: '>', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.other.lt-gt.include.c', 'punctuation.definition.string.end.c'] + expect(lines[2][0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[2][1]).toEqual value: 'elif', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[2][3]).toEqual value: 'defined', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[2][5]).toEqual value: '_WIN32', scopes: ['source.c', 'meta.preprocessor.c', 'entity.name.function.preprocessor.c'] + expect(lines[2][7]).toEqual value: '/*', scopes: ['source.c', 'comment.block.c', 'punctuation.definition.comment.begin.c'] + expect(lines[2][8]).toEqual value: ' is defined by compilers targeting Windows systems ', scopes: ['source.c', 'comment.block.c'] + expect(lines[2][9]).toEqual value: '*/', scopes: ['source.c', 'comment.block.c', 'punctuation.definition.comment.end.c'] + expect(lines[3][1]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.include.c', 'keyword.control.directive.include.c', 'punctuation.definition.directive.c'] + expect(lines[3][2]).toEqual value: ' include', scopes: ['source.c', 'meta.preprocessor.include.c', 'keyword.control.directive.include.c'] + expect(lines[3][4]).toEqual value: '<', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.other.lt-gt.include.c', 'punctuation.definition.string.begin.c'] + expect(lines[3][5]).toEqual value: 'windows.h', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.other.lt-gt.include.c'] + expect(lines[3][6]).toEqual value: '>', scopes: ['source.c', 'meta.preprocessor.include.c', 'string.quoted.other.lt-gt.include.c', 'punctuation.definition.string.end.c'] + expect(lines[4][0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[4][1]).toEqual value: 'endif', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + + it "tokenizes ifndef blocks", -> + lines = grammar.tokenizeLines ''' + #ifndef _INCL_GUARD + #define _INCL_GUARD + #endif + ''' + expect(lines[0][0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[0][1]).toEqual value: 'ifndef', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[0][3]).toEqual value: '_INCL_GUARD', scopes: ['source.c', 'meta.preprocessor.c', 'entity.name.function.preprocessor.c'] + expect(lines[1][1]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c', 
'punctuation.definition.directive.c'] + expect(lines[1][2]).toEqual value: 'define', scopes: ['source.c', 'meta.preprocessor.macro.c', 'keyword.control.directive.define.c'] + expect(lines[1][4]).toEqual value: '_INCL_GUARD', scopes: ['source.c', 'meta.preprocessor.macro.c', 'entity.name.function.preprocessor.c'] + expect(lines[2][0]).toEqual value: '#', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[2][1]).toEqual value: 'endif', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + + it "highlights stray elif, else and endif usages as invalid", -> + lines = grammar.tokenizeLines ''' + #if defined SOMEMACRO + #else + #elif //elif not permitted here + #endif + #else //else without if + #endif //endif without if + ''' + expect(lines[2][0]).toEqual value: '#elif', scopes: ['source.c', 'invalid.illegal.stray-elif.c'] + expect(lines[4][0]).toEqual value: '#else', scopes: ['source.c', 'invalid.illegal.stray-else.c'] + expect(lines[5][0]).toEqual value: '#endif', scopes: ['source.c', 'invalid.illegal.stray-endif.c'] + + it "highlights errorneous defined usage as invalid", -> + {tokens} = grammar.tokenizeLine '#if defined == VALUE' + expect(tokens[3]).toEqual value: 'defined', scopes: ['source.c', 'meta.preprocessor.c', 'invalid.illegal.macro-name.c'] + + it "tokenizes multi line conditional queries", -> + lines = grammar.tokenizeLines ''' + #if !defined (MACRO_A) \\ + || !defined MACRO_C + #define MACRO_A TRUE + #elif MACRO_C == (5 + 4 - /* multi line comment */ \\ + SOMEMACRO(TRUE) * 8) // single line comment + #endif + ''' + expect(lines[0][2]).toEqual value: ' ', scopes: ['source.c', 'meta.preprocessor.c'] + expect(lines[0][3]).toEqual value: '!', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.operator.logical.c'] + expect(lines[0][7]).toEqual value: 'MACRO_A', scopes: ['source.c', 'meta.preprocessor.c', 'entity.name.function.preprocessor.c'] + expect(lines[0][10]).toEqual value: '\\', scopes: ['source.c', 'meta.preprocessor.c', 'constant.character.escape.line-continuation.c'] + expect(lines[1][1]).toEqual value: '||', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.operator.logical.c'] + expect(lines[1][3]).toEqual value: '!', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.operator.logical.c'] + expect(lines[1][4]).toEqual value: 'defined', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[1][6]).toEqual value: 'MACRO_C', scopes: ['source.c', 'meta.preprocessor.c', 'entity.name.function.preprocessor.c'] + expect(lines[3][2]).toEqual value: ' ', scopes: ['source.c', 'meta.preprocessor.c'] + expect(lines[3][3]).toEqual value: 'MACRO_C', scopes: ['source.c', 'meta.preprocessor.c', 'entity.name.function.preprocessor.c'] + expect(lines[3][5]).toEqual value: '==', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.operator.comparison.c'] + expect(lines[3][7]).toEqual value: '(', scopes: ['source.c', 'meta.preprocessor.c', 'punctuation.section.parens.begin.bracket.round.c'] + expect(lines[3][8]).toEqual value: '5', scopes: ['source.c', 'meta.preprocessor.c', 'constant.numeric.c'] + expect(lines[3][10]).toEqual value: '+', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.operator.c'] + expect(lines[3][14]).toEqual value: '-', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.operator.c'] + expect(lines[3][16]).toEqual value: '/*', scopes: ['source.c', 'meta.preprocessor.c', 'comment.block.c', 
'punctuation.definition.comment.begin.c'] + expect(lines[3][17]).toEqual value: ' multi line comment ', scopes: ['source.c', 'meta.preprocessor.c', 'comment.block.c'] + expect(lines[3][18]).toEqual value: '*/', scopes: ['source.c', 'meta.preprocessor.c', 'comment.block.c', 'punctuation.definition.comment.end.c'] + expect(lines[3][20]).toEqual value: '\\', scopes: ['source.c', 'meta.preprocessor.c', 'constant.character.escape.line-continuation.c'] + expect(lines[4][1]).toEqual value: 'SOMEMACRO', scopes: ['source.c', 'meta.preprocessor.c', 'entity.name.function.preprocessor.c'] + expect(lines[4][3]).toEqual value: 'TRUE', scopes: ['source.c', 'meta.preprocessor.c', 'constant.language.c'] + expect(lines[4][6]).toEqual value: '*', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.operator.c'] + expect(lines[4][9]).toEqual value: ')', scopes: ['source.c', 'meta.preprocessor.c', 'punctuation.section.parens.end.bracket.round.c'] + expect(lines[4][11]).toEqual value: '//', scopes: ['source.c', 'comment.line.double-slash.cpp', 'punctuation.definition.comment.cpp'] + expect(lines[4][12]).toEqual value: ' single line comment', scopes: ['source.c', 'comment.line.double-slash.cpp'] + + it "tokenizes ternary operator usage in preprocessor conditionals", -> + {tokens} = grammar.tokenizeLine '#if defined (__GNU_LIBRARY__) ? defined (__USE_GNU) : !defined (__STRICT_ANSI__)' + expect(tokens[9]).toEqual value: '?', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.operator.ternary.c'] + expect(tokens[11]).toEqual value: 'defined', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(tokens[17]).toEqual value: ':', scopes: ['source.c', 'meta.preprocessor.c', 'keyword.operator.ternary.c'] + + describe "indentation", -> + editor = null + + beforeEach -> + editor = buildTextEditor() + editor.setGrammar(grammar) + + expectPreservedIndentation = (text) -> + editor.setText(text) + editor.autoIndentBufferRows(0, editor.getLineCount() - 1) + + expectedLines = text.split('\n') + actualLines = editor.getText().split('\n') + for actualLine, i in actualLines + expect([ + actualLine, + editor.indentLevelForLine(actualLine) + ]).toEqual([ + expectedLines[i], + editor.indentLevelForLine(expectedLines[i]) + ]) + + it "indents allman-style curly braces", -> + expectPreservedIndentation ''' + if (a) + { + for (;;) + { + do + { + while (b) + { + c(); + } + } + while (d) + } + } + ''' + + it "indents non-allman-style curly braces", -> + expectPreservedIndentation ''' + if (a) { + for (;;) { + do { + while (b) { + c(); + } + } while (d) + } + } + ''' + + it "indents function arguments", -> + expectPreservedIndentation ''' + a( + b, + c( + d + ) + ); + ''' + + it "indents array and struct literals", -> + expectPreservedIndentation ''' + some_t a[3] = { + { .b = c }, + { .b = c, .d = {1, 2} }, + }; + ''' + + it "tokenizes binary literal", -> + {tokens} = grammar.tokenizeLine '0b101010' + expect(tokens[0]).toEqual value: '0b101010', scopes: ['source.c', 'constant.numeric.c'] + + describe "access", -> + it "tokenizes the dot access operator", -> + lines = grammar.tokenizeLines ''' + { + a. 
+ } + ''' + expect(lines[1][0]).toEqual value: ' a', scopes: ['source.c', 'meta.block.c'] + expect(lines[1][1]).toEqual value: '.', scopes: ['source.c', 'meta.block.c', 'punctuation.separator.dot-access.c'] + + lines = grammar.tokenizeLines ''' + { + a.b; + } + ''' + expect(lines[1][0]).toEqual value: ' a', scopes: ['source.c', 'meta.block.c'] + expect(lines[1][1]).toEqual value: '.', scopes: ['source.c', 'meta.block.c', 'punctuation.separator.dot-access.c'] + expect(lines[1][2]).toEqual value: 'b', scopes: ['source.c', 'meta.block.c', 'variable.other.member.c'] + + lines = grammar.tokenizeLines ''' + { + a.b() + } + ''' + expect(lines[1][0]).toEqual value: ' a', scopes: ['source.c', 'meta.block.c'] + expect(lines[1][1]).toEqual value: '.', scopes: ['source.c', 'meta.block.c', 'punctuation.separator.dot-access.c'] + expect(lines[1][2]).toEqual value: 'b', scopes: ['source.c', 'meta.block.c', 'meta.function-call.c', 'entity.name.function.c'] + + lines = grammar.tokenizeLines ''' + { + a. b; + } + ''' + expect(lines[1][1]).toEqual value: '.', scopes: ['source.c', 'meta.block.c', 'punctuation.separator.dot-access.c'] + expect(lines[1][3]).toEqual value: 'b', scopes: ['source.c', 'meta.block.c', 'variable.other.member.c'] + + lines = grammar.tokenizeLines ''' + { + a .b; + } + ''' + expect(lines[1][1]).toEqual value: '.', scopes: ['source.c', 'meta.block.c', 'punctuation.separator.dot-access.c'] + expect(lines[1][2]).toEqual value: 'b', scopes: ['source.c', 'meta.block.c', 'variable.other.member.c'] + + lines = grammar.tokenizeLines ''' + { + a . b; + } + ''' + expect(lines[1][1]).toEqual value: '.', scopes: ['source.c', 'meta.block.c', 'punctuation.separator.dot-access.c'] + expect(lines[1][3]).toEqual value: 'b', scopes: ['source.c', 'meta.block.c', 'variable.other.member.c'] + + it "tokenizes the pointer access operator", -> + lines = grammar.tokenizeLines ''' + { + a->b; + } + ''' + expect(lines[1][1]).toEqual value: '->', scopes: ['source.c', 'meta.block.c', 'punctuation.separator.pointer-access.c'] + expect(lines[1][2]).toEqual value: 'b', scopes: ['source.c', 'meta.block.c', 'variable.other.member.c'] + + lines = grammar.tokenizeLines ''' + { + a->b() + } + ''' + expect(lines[1][0]).toEqual value: ' a', scopes: ['source.c', 'meta.block.c'] + expect(lines[1][1]).toEqual value: '->', scopes: ['source.c', 'meta.block.c', 'punctuation.separator.pointer-access.c'] + + lines = grammar.tokenizeLines ''' + { + a-> b; + } + ''' + expect(lines[1][1]).toEqual value: '->', scopes: ['source.c', 'meta.block.c', 'punctuation.separator.pointer-access.c'] + expect(lines[1][3]).toEqual value: 'b', scopes: ['source.c', 'meta.block.c', 'variable.other.member.c'] + + lines = grammar.tokenizeLines ''' + { + a ->b; + } + ''' + expect(lines[1][1]).toEqual value: '->', scopes: ['source.c', 'meta.block.c', 'punctuation.separator.pointer-access.c'] + expect(lines[1][2]).toEqual value: 'b', scopes: ['source.c', 'meta.block.c', 'variable.other.member.c'] + + lines = grammar.tokenizeLines ''' + { + a -> b; + } + ''' + expect(lines[1][1]).toEqual value: '->', scopes: ['source.c', 'meta.block.c', 'punctuation.separator.pointer-access.c'] + expect(lines[1][3]).toEqual value: 'b', scopes: ['source.c', 'meta.block.c', 'variable.other.member.c'] + + lines = grammar.tokenizeLines ''' + { + a-> + } + ''' + expect(lines[1][0]).toEqual value: ' a', scopes: ['source.c', 'meta.block.c'] + expect(lines[1][1]).toEqual value: '->', scopes: ['source.c', 'meta.block.c', 'punctuation.separator.pointer-access.c'] + + describe 
"operators", -> + it "tokenizes the sizeof operator", -> + {tokens} = grammar.tokenizeLine('sizeof unary_expression') + expect(tokens[0]).toEqual value: 'sizeof', scopes: ['source.c', 'keyword.operator.sizeof.c'] + expect(tokens[1]).toEqual value: ' unary_expression', scopes: ['source.c'] + + {tokens} = grammar.tokenizeLine('sizeof (int)') + expect(tokens[0]).toEqual value: 'sizeof', scopes: ['source.c', 'keyword.operator.sizeof.c'] + expect(tokens[1]).toEqual value: ' ', scopes: ['source.c'] + expect(tokens[2]).toEqual value: '(', scopes: ['source.c', 'punctuation.section.parens.begin.bracket.round.c'] + expect(tokens[3]).toEqual value: 'int', scopes: ['source.c', 'storage.type.c'] + expect(tokens[4]).toEqual value: ')', scopes: ['source.c', 'punctuation.section.parens.end.bracket.round.c'] + + {tokens} = grammar.tokenizeLine('$sizeof') + expect(tokens[1]).not.toEqual value: 'sizeof', scopes: ['source.c', 'keyword.operator.sizeof.c'] + + {tokens} = grammar.tokenizeLine('sizeof$') + expect(tokens[0]).not.toEqual value: 'sizeof', scopes: ['source.c', 'keyword.operator.sizeof.c'] + + {tokens} = grammar.tokenizeLine('sizeof_') + expect(tokens[0]).not.toEqual value: 'sizeof', scopes: ['source.c', 'keyword.operator.sizeof.c'] + + it "tokenizes the increment operator", -> + {tokens} = grammar.tokenizeLine('i++') + expect(tokens[0]).toEqual value: 'i', scopes: ['source.c'] + expect(tokens[1]).toEqual value: '++', scopes: ['source.c', 'keyword.operator.increment.c'] + + {tokens} = grammar.tokenizeLine('++i') + expect(tokens[0]).toEqual value: '++', scopes: ['source.c', 'keyword.operator.increment.c'] + expect(tokens[1]).toEqual value: 'i', scopes: ['source.c'] + + it "tokenizes the decrement operator", -> + {tokens} = grammar.tokenizeLine('i--') + expect(tokens[0]).toEqual value: 'i', scopes: ['source.c'] + expect(tokens[1]).toEqual value: '--', scopes: ['source.c', 'keyword.operator.decrement.c'] + + {tokens} = grammar.tokenizeLine('--i') + expect(tokens[0]).toEqual value: '--', scopes: ['source.c', 'keyword.operator.decrement.c'] + expect(tokens[1]).toEqual value: 'i', scopes: ['source.c'] + + it "tokenizes logical operators", -> + {tokens} = grammar.tokenizeLine('!a') + expect(tokens[0]).toEqual value: '!', scopes: ['source.c', 'keyword.operator.logical.c'] + expect(tokens[1]).toEqual value: 'a', scopes: ['source.c'] + + operators = ['&&', '||'] + for operator in operators + {tokens} = grammar.tokenizeLine('a ' + operator + ' b') + expect(tokens[0]).toEqual value: 'a ', scopes: ['source.c'] + expect(tokens[1]).toEqual value: operator, scopes: ['source.c', 'keyword.operator.logical.c'] + expect(tokens[2]).toEqual value: ' b', scopes: ['source.c'] + + it "tokenizes comparison operators", -> + operators = ['<=', '>=', '!=', '==', '<', '>' ] + + for operator in operators + {tokens} = grammar.tokenizeLine('a ' + operator + ' b') + expect(tokens[0]).toEqual value: 'a ', scopes: ['source.c'] + expect(tokens[1]).toEqual value: operator, scopes: ['source.c', 'keyword.operator.comparison.c'] + expect(tokens[2]).toEqual value: ' b', scopes: ['source.c'] + + it "tokenizes arithmetic operators", -> + operators = ['+', '-', '*', '/', '%'] + + for operator in operators + {tokens} = grammar.tokenizeLine('a ' + operator + ' b') + expect(tokens[0]).toEqual value: 'a ', scopes: ['source.c'] + expect(tokens[1]).toEqual value: operator, scopes: ['source.c', 'keyword.operator.c'] + expect(tokens[2]).toEqual value: ' b', scopes: ['source.c'] + + it "tokenizes ternary operators", -> + {tokens} = 
grammar.tokenizeLine('a ? b : c') + expect(tokens[0]).toEqual value: 'a ', scopes: ['source.c'] + expect(tokens[1]).toEqual value: '?', scopes: ['source.c', 'keyword.operator.ternary.c'] + expect(tokens[2]).toEqual value: ' b ', scopes: ['source.c'] + expect(tokens[3]).toEqual value: ':', scopes: ['source.c', 'keyword.operator.ternary.c'] + expect(tokens[4]).toEqual value: ' c', scopes: ['source.c'] + + it "tokenizes ternary operators with member access", -> + {tokens} = grammar.tokenizeLine('a ? b.c : d') + expect(tokens[0]).toEqual value: 'a ', scopes: ['source.c'] + expect(tokens[1]).toEqual value: '?', scopes: ['source.c', 'keyword.operator.ternary.c'] + expect(tokens[2]).toEqual value: ' b', scopes: ['source.c'] + expect(tokens[3]).toEqual value: '.', scopes: ['source.c', 'punctuation.separator.dot-access.c'] + expect(tokens[4]).toEqual value: 'c', scopes: ['source.c', 'variable.other.member.c'] + expect(tokens[5]).toEqual value: ' ', scopes: ['source.c'] + expect(tokens[6]).toEqual value: ':', scopes: ['source.c', 'keyword.operator.ternary.c'] + expect(tokens[7]).toEqual value: ' d', scopes: ['source.c'] + + it "tokenizes ternary operators with pointer dereferencing", -> + {tokens} = grammar.tokenizeLine('a ? b->c : d') + expect(tokens[0]).toEqual value: 'a ', scopes: ['source.c'] + expect(tokens[1]).toEqual value: '?', scopes: ['source.c', 'keyword.operator.ternary.c'] + expect(tokens[2]).toEqual value: ' b', scopes: ['source.c'] + expect(tokens[3]).toEqual value: '->', scopes: ['source.c', 'punctuation.separator.pointer-access.c'] + expect(tokens[4]).toEqual value: 'c', scopes: ['source.c', 'variable.other.member.c'] + expect(tokens[5]).toEqual value: ' ', scopes: ['source.c'] + expect(tokens[6]).toEqual value: ':', scopes: ['source.c', 'keyword.operator.ternary.c'] + expect(tokens[7]).toEqual value: ' d', scopes: ['source.c'] + + it "tokenizes ternary operators with function invocation", -> + {tokens} = grammar.tokenizeLine('a ? 
f(b) : c') + expect(tokens[0]).toEqual value: 'a ', scopes: ['source.c'] + expect(tokens[1]).toEqual value: '?', scopes: ['source.c', 'keyword.operator.ternary.c'] + expect(tokens[2]).toEqual value: ' ', scopes: ['source.c'] + expect(tokens[3]).toEqual value: 'f', scopes: ['source.c', 'meta.function-call.c', 'entity.name.function.c'] + expect(tokens[4]).toEqual value: '(', scopes: ['source.c', 'meta.function-call.c', 'punctuation.section.arguments.begin.bracket.round.c'] + expect(tokens[5]).toEqual value: 'b', scopes: ['source.c', 'meta.function-call.c'] + expect(tokens[6]).toEqual value: ')', scopes: ['source.c', 'meta.function-call.c', 'punctuation.section.arguments.end.bracket.round.c'] + expect(tokens[7]).toEqual value: ' ', scopes: ['source.c'] + expect(tokens[8]).toEqual value: ':', scopes: ['source.c', 'keyword.operator.ternary.c'] + expect(tokens[9]).toEqual value: ' c', scopes: ['source.c'] + + describe "bitwise", -> + it "tokenizes bitwise 'not'", -> + {tokens} = grammar.tokenizeLine('~a') + expect(tokens[0]).toEqual value: '~', scopes: ['source.c', 'keyword.operator.c'] + expect(tokens[1]).toEqual value: 'a', scopes: ['source.c'] + + it "tokenizes shift operators", -> + {tokens} = grammar.tokenizeLine('>>') + expect(tokens[0]).toEqual value: '>>', scopes: ['source.c', 'keyword.operator.bitwise.shift.c'] + + {tokens} = grammar.tokenizeLine('<<') + expect(tokens[0]).toEqual value: '<<', scopes: ['source.c', 'keyword.operator.bitwise.shift.c'] + + it "tokenizes them", -> + operators = ['|', '^', '&'] + + for operator in operators + {tokens} = grammar.tokenizeLine('a ' + operator + ' b') + expect(tokens[0]).toEqual value: 'a ', scopes: ['source.c'] + expect(tokens[1]).toEqual value: operator, scopes: ['source.c', 'keyword.operator.c'] + expect(tokens[2]).toEqual value: ' b', scopes: ['source.c'] + + describe "assignment", -> + it "tokenizes the assignment operator", -> + {tokens} = grammar.tokenizeLine('a = b') + expect(tokens[0]).toEqual value: 'a ', scopes: ['source.c'] + expect(tokens[1]).toEqual value: '=', scopes: ['source.c', 'keyword.operator.assignment.c'] + expect(tokens[2]).toEqual value: ' b', scopes: ['source.c'] + + it "tokenizes compound assignment operators", -> + operators = ['+=', '-=', '*=', '/=', '%='] + for operator in operators + {tokens} = grammar.tokenizeLine('a ' + operator + ' b') + expect(tokens[0]).toEqual value: 'a ', scopes: ['source.c'] + expect(tokens[1]).toEqual value: operator, scopes: ['source.c', 'keyword.operator.assignment.compound.c'] + expect(tokens[2]).toEqual value: ' b', scopes: ['source.c'] + + it "tokenizes bitwise compound operators", -> + operators = ['<<=', '>>=', '&=', '^=', '|='] + for operator in operators + {tokens} = grammar.tokenizeLine('a ' + operator + ' b') + expect(tokens[0]).toEqual value: 'a ', scopes: ['source.c'] + expect(tokens[1]).toEqual value: operator, scopes: ['source.c', 'keyword.operator.assignment.compound.bitwise.c'] + expect(tokens[2]).toEqual value: ' b', scopes: ['source.c'] + + describe "C++", -> + beforeEach -> + grammar = atom.grammars.grammarForScopeName('source.cpp') + + it "parses the grammar", -> + expect(grammar).toBeTruthy() + expect(grammar.scopeName).toBe 'source.cpp' + + it "tokenizes this with `.this` class", -> + {tokens} = grammar.tokenizeLine 'this.x' + expect(tokens[0]).toEqual value: 'this', scopes: ['source.cpp', 'variable.language.this.cpp'] + + it "tokenizes classes", -> + lines = grammar.tokenizeLines ''' + class Thing { + int x; + } + ''' + expect(lines[0][0]).toEqual value: 'class', 
scopes: ['source.cpp', 'meta.class-struct-block.cpp', 'storage.type.cpp'] + expect(lines[0][2]).toEqual value: 'Thing', scopes: ['source.cpp', 'meta.class-struct-block.cpp', 'entity.name.type.cpp'] + + it "tokenizes 'extern C'", -> + lines = grammar.tokenizeLines ''' + extern "C" { + #include "legacy_C_header.h" + } + ''' + expect(lines[0][0]).toEqual value: 'extern', scopes: ['source.cpp', 'meta.extern-block.cpp', 'storage.modifier.cpp'] + expect(lines[0][2]).toEqual value: '"', scopes: ['source.cpp', 'meta.extern-block.cpp', 'string.quoted.double.cpp', 'punctuation.definition.string.begin.cpp'] + expect(lines[0][3]).toEqual value: 'C', scopes: ['source.cpp', 'meta.extern-block.cpp', 'string.quoted.double.cpp'] + expect(lines[0][4]).toEqual value: '"', scopes: ['source.cpp', 'meta.extern-block.cpp', 'string.quoted.double.cpp', 'punctuation.definition.string.end.cpp'] + expect(lines[0][6]).toEqual value: '{', scopes: ['source.cpp', 'meta.extern-block.cpp', 'punctuation.section.block.begin.bracket.curly.c'] + expect(lines[1][0]).toEqual value: '#', scopes: ['source.cpp', 'meta.extern-block.cpp', 'meta.preprocessor.include.c', 'keyword.control.directive.include.c', 'punctuation.definition.directive.c'] + expect(lines[1][1]).toEqual value: 'include', scopes: ['source.cpp', 'meta.extern-block.cpp', 'meta.preprocessor.include.c', 'keyword.control.directive.include.c'] + expect(lines[1][3]).toEqual value: '"', scopes: ['source.cpp', 'meta.extern-block.cpp', 'meta.preprocessor.include.c', 'string.quoted.double.include.c', 'punctuation.definition.string.begin.c'] + expect(lines[1][4]).toEqual value: 'legacy_C_header.h', scopes: ['source.cpp', 'meta.extern-block.cpp', 'meta.preprocessor.include.c', 'string.quoted.double.include.c'] + expect(lines[1][5]).toEqual value: '"', scopes: ['source.cpp', 'meta.extern-block.cpp', 'meta.preprocessor.include.c', 'string.quoted.double.include.c', 'punctuation.definition.string.end.c'] + expect(lines[2][0]).toEqual value: '}', scopes: ['source.cpp', 'meta.extern-block.cpp', 'punctuation.section.block.end.bracket.curly.c'] + + lines = grammar.tokenizeLines ''' + #ifdef __cplusplus + extern "C" { + #endif + // legacy C code here + #ifdef __cplusplus + } + #endif + ''' + expect(lines[0][0]).toEqual value: '#', scopes: ['source.cpp', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[0][1]).toEqual value: 'ifdef', scopes: ['source.cpp', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[0][3]).toEqual value: '__cplusplus', scopes: ['source.cpp', 'meta.preprocessor.c', 'entity.name.function.preprocessor.c'] + expect(lines[1][0]).toEqual value: 'extern', scopes: ['source.cpp', 'meta.extern-block.cpp', 'storage.modifier.cpp'] + expect(lines[1][2]).toEqual value: '"', scopes: ['source.cpp', 'meta.extern-block.cpp', 'string.quoted.double.cpp', 'punctuation.definition.string.begin.cpp'] + expect(lines[1][3]).toEqual value: 'C', scopes: ['source.cpp', 'meta.extern-block.cpp', 'string.quoted.double.cpp'] + expect(lines[1][4]).toEqual value: '"', scopes: ['source.cpp', 'meta.extern-block.cpp', 'string.quoted.double.cpp', 'punctuation.definition.string.end.cpp'] + expect(lines[1][6]).toEqual value: '{', scopes: ['source.cpp', 'meta.extern-block.cpp', 'punctuation.section.block.begin.bracket.curly.c'] + expect(lines[2][0]).toEqual value: '#', scopes: ['source.cpp', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + 
expect(lines[2][1]).toEqual value: 'endif', scopes: ['source.cpp', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[3][1]).toEqual value: '//', scopes: ['source.cpp', 'comment.line.double-slash.cpp', 'punctuation.definition.comment.cpp'] + expect(lines[3][2]).toEqual value: ' legacy C code here', scopes: ['source.cpp', 'comment.line.double-slash.cpp'] + expect(lines[4][0]).toEqual value: '#', scopes: ['source.cpp', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[4][1]).toEqual value: 'ifdef', scopes: ['source.cpp', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + expect(lines[5][0]).toEqual value: '}', scopes: ['source.cpp'] + expect(lines[6][0]).toEqual value: '#', scopes: ['source.cpp', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c', 'punctuation.definition.directive.c'] + expect(lines[6][1]).toEqual value: 'endif', scopes: ['source.cpp', 'meta.preprocessor.c', 'keyword.control.directive.conditional.c'] + + it "tokenizes UTF string escapes", -> + lines = grammar.tokenizeLines ''' + string str = U"\\U01234567\\u0123\\"\\0123\\x123"; + ''' + expect(lines[0][0]).toEqual value: 'string str ', scopes: ['source.cpp'] + expect(lines[0][1]).toEqual value: '=', scopes: ['source.cpp', 'keyword.operator.assignment.c'] + expect(lines[0][3]).toEqual value: 'U', scopes: ['source.cpp', 'string.quoted.double.cpp', 'punctuation.definition.string.begin.cpp', 'meta.encoding.cpp'] + expect(lines[0][4]).toEqual value: '"', scopes: ['source.cpp', 'string.quoted.double.cpp', 'punctuation.definition.string.begin.cpp'] + expect(lines[0][5]).toEqual value: '\\U01234567', scopes: ['source.cpp', 'string.quoted.double.cpp', 'constant.character.escape.cpp'] + expect(lines[0][6]).toEqual value: '\\u0123', scopes: ['source.cpp', 'string.quoted.double.cpp', 'constant.character.escape.cpp'] + expect(lines[0][7]).toEqual value: '\\"', scopes: ['source.cpp', 'string.quoted.double.cpp', 'constant.character.escape.cpp'] + expect(lines[0][8]).toEqual value: '\\012', scopes: ['source.cpp', 'string.quoted.double.cpp', 'constant.character.escape.cpp'] + expect(lines[0][9]).toEqual value: '3', scopes: ['source.cpp', 'string.quoted.double.cpp'] + expect(lines[0][10]).toEqual value: '\\x123', scopes: ['source.cpp', 'string.quoted.double.cpp', 'constant.character.escape.cpp'] + expect(lines[0][11]).toEqual value: '"', scopes: ['source.cpp', 'string.quoted.double.cpp', 'punctuation.definition.string.end.cpp'] + expect(lines[0][12]).toEqual value: ';', scopes: ['source.cpp', 'punctuation.terminator.statement.c'] + + it "tokenizes % format specifiers", -> + {tokens} = grammar.tokenizeLine '"%d"' + expect(tokens[0]).toEqual value: '"', scopes: ['source.cpp', 'string.quoted.double.cpp', 'punctuation.definition.string.begin.cpp'] + expect(tokens[1]).toEqual value: '%d', scopes: ['source.cpp', 'string.quoted.double.cpp', 'constant.other.placeholder.c'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.cpp', 'string.quoted.double.cpp', 'punctuation.definition.string.end.cpp'] + + {tokens} = grammar.tokenizeLine '"%"' + expect(tokens[0]).toEqual value: '"', scopes: ['source.cpp', 'string.quoted.double.cpp', 'punctuation.definition.string.begin.cpp'] + expect(tokens[1]).toEqual value: '%', scopes: ['source.cpp', 'string.quoted.double.cpp', 'invalid.illegal.placeholder.c'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.cpp', 'string.quoted.double.cpp', 
'punctuation.definition.string.end.cpp'] + + {tokens} = grammar.tokenizeLine '"%" PRId32' + expect(tokens[0]).toEqual value: '"', scopes: ['source.cpp', 'string.quoted.double.cpp', 'punctuation.definition.string.begin.cpp'] + expect(tokens[1]).toEqual value: '%', scopes: ['source.cpp', 'string.quoted.double.cpp'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.cpp', 'string.quoted.double.cpp', 'punctuation.definition.string.end.cpp'] + + it "tokenizes raw string literals", -> + lines = grammar.tokenizeLines ''' + string str = R"test( + this is \"a\" test 'string' + )test"; + ''' + expect(lines[0][0]).toEqual value: 'string str ', scopes: ['source.cpp'] + expect(lines[0][3]).toEqual value: 'R"test(', scopes: ['source.cpp', 'string.quoted.double.raw.cpp', 'punctuation.definition.string.begin.cpp'] + expect(lines[1][0]).toEqual value: ' this is "a" test \'string\'', scopes: ['source.cpp', 'string.quoted.double.raw.cpp'] + expect(lines[2][0]).toEqual value: ')test"', scopes: ['source.cpp', 'string.quoted.double.raw.cpp', 'punctuation.definition.string.end.cpp'] + expect(lines[2][1]).toEqual value: ';', scopes: ['source.cpp', 'punctuation.terminator.statement.c'] + + it "errors on long raw string delimiters", -> + lines = grammar.tokenizeLines ''' + string str = R"01234567890123456()01234567890123456"; + ''' + expect(lines[0][0]).toEqual value: 'string str ', scopes: ['source.cpp'] + expect(lines[0][3]).toEqual value: 'R"', scopes: ['source.cpp', 'string.quoted.double.raw.cpp', 'punctuation.definition.string.begin.cpp'] + expect(lines[0][4]).toEqual value: '01234567890123456', scopes: ['source.cpp', 'string.quoted.double.raw.cpp', 'punctuation.definition.string.begin.cpp', 'invalid.illegal.delimiter-too-long.cpp'] + expect(lines[0][5]).toEqual value: '(', scopes: ['source.cpp', 'string.quoted.double.raw.cpp', 'punctuation.definition.string.begin.cpp'] + expect(lines[0][6]).toEqual value: ')', scopes: ['source.cpp', 'string.quoted.double.raw.cpp', 'punctuation.definition.string.end.cpp'] + expect(lines[0][7]).toEqual value: '01234567890123456', scopes: ['source.cpp', 'string.quoted.double.raw.cpp', 'punctuation.definition.string.end.cpp', 'invalid.illegal.delimiter-too-long.cpp'] + expect(lines[0][8]).toEqual value: '"', scopes: ['source.cpp', 'string.quoted.double.raw.cpp', 'punctuation.definition.string.end.cpp'] + expect(lines[0][9]).toEqual value: ';', scopes: ['source.cpp', 'punctuation.terminator.statement.c'] + + it "tokenizes destructors", -> + {tokens} = grammar.tokenizeLine('~Foo() {}') + expect(tokens[0]).toEqual value: '~Foo', scopes: ['source.cpp', 'meta.function.destructor.cpp', 'entity.name.function.cpp'] + expect(tokens[1]).toEqual value: '(', scopes: ['source.cpp', 'meta.function.destructor.cpp', 'punctuation.definition.parameters.begin.c'] + expect(tokens[2]).toEqual value: ')', scopes: ['source.cpp', 'meta.function.destructor.cpp', 'punctuation.definition.parameters.end.c'] + expect(tokens[4]).toEqual value: '{', scopes: ['source.cpp', 'meta.block.c', 'punctuation.section.block.begin.bracket.curly.c'] + expect(tokens[5]).toEqual value: '}', scopes: ['source.cpp', 'meta.block.c', 'punctuation.section.block.end.bracket.curly.c'] + + {tokens} = grammar.tokenizeLine('Foo::~Bar() {}') + expect(tokens[0]).toEqual value: 'Foo::~Bar', scopes: ['source.cpp', 'meta.function.destructor.cpp', 'entity.name.function.cpp'] + expect(tokens[1]).toEqual value: '(', scopes: ['source.cpp', 'meta.function.destructor.cpp', 'punctuation.definition.parameters.begin.c'] + 
expect(tokens[2]).toEqual value: ')', scopes: ['source.cpp', 'meta.function.destructor.cpp', 'punctuation.definition.parameters.end.c'] + expect(tokens[4]).toEqual value: '{', scopes: ['source.cpp', 'meta.block.c', 'punctuation.section.block.begin.bracket.curly.c'] + expect(tokens[5]).toEqual value: '}', scopes: ['source.cpp', 'meta.block.c', 'punctuation.section.block.end.bracket.curly.c'] + + describe "digit separators", -> + it "recognizes numbers with digit separators", -> + {tokens} = grammar.tokenizeLine "1'000" + expect(tokens[0]).toEqual value: "1'000", scopes: ['source.cpp', 'constant.numeric.c'] + + {tokens} = grammar.tokenizeLine "123'456.500'000e-1'5" + expect(tokens[0]).toEqual value: "123'456.500'000e-1'5", scopes: ['source.cpp', 'constant.numeric.c'] + + {tokens} = grammar.tokenizeLine "0x1234'5678" + expect(tokens[0]).toEqual value: "0x1234'5678", scopes: ['source.cpp', 'constant.numeric.c'] + + {tokens} = grammar.tokenizeLine "0'123'456" + expect(tokens[0]).toEqual value: "0'123'456", scopes: ['source.cpp', 'constant.numeric.c'] + + {tokens} = grammar.tokenizeLine "0b1100'0011'1111'0000" + expect(tokens[0]).toEqual value: "0b1100'0011'1111'0000", scopes: ['source.cpp', 'constant.numeric.c'] + + it "does not tokenize single quotes at the beginning or end of numbers as digit separators", -> + {tokens} = grammar.tokenizeLine "'1000" + expect(tokens[0]).toEqual value: "'", scopes: ['source.cpp', 'string.quoted.single.c', 'punctuation.definition.string.begin.c'] + expect(tokens[1]).toEqual value: "1000", scopes: ['source.cpp', 'string.quoted.single.c'] + + {tokens} = grammar.tokenizeLine "1000'" + expect(tokens[0]).toEqual value: "1000", scopes: ['source.cpp', 'constant.numeric.c'] + expect(tokens[1]).toEqual value: "'", scopes: ['source.cpp', 'string.quoted.single.c', 'punctuation.definition.string.begin.c'] + + describe "comments", -> + it "tokenizes them", -> + {tokens} = grammar.tokenizeLine '// comment' + expect(tokens[0]).toEqual value: '//', scopes: ['source.cpp', 'comment.line.double-slash.cpp', 'punctuation.definition.comment.cpp'] + expect(tokens[1]).toEqual value: ' comment', scopes: ['source.cpp', 'comment.line.double-slash.cpp'] + + lines = grammar.tokenizeLines ''' + // separated\\ + comment + ''' + expect(lines[0][0]).toEqual value: '//', scopes: ['source.cpp', 'comment.line.double-slash.cpp', 'punctuation.definition.comment.cpp'] + expect(lines[0][1]).toEqual value: ' separated', scopes: ['source.cpp', 'comment.line.double-slash.cpp'] + expect(lines[0][2]).toEqual value: '\\', scopes: ['source.cpp', 'comment.line.double-slash.cpp', 'constant.character.escape.line-continuation.c'] + expect(lines[1][0]).toEqual value: 'comment', scopes: ['source.cpp', 'comment.line.double-slash.cpp'] + + lines = grammar.tokenizeLines ''' + // The space character \x20 is used to prevent stripping trailing whitespace + // not separated\\\x20 + comment + ''' + expect(lines[1][0]).toEqual value: '//', scopes: ['source.cpp', 'comment.line.double-slash.cpp', 'punctuation.definition.comment.cpp'] + expect(lines[1][1]).toEqual value: ' not separated\\ ', scopes: ['source.cpp', 'comment.line.double-slash.cpp'] + expect(lines[2][0]).toEqual value: 'comment', scopes: ['source.cpp'] + + describe "operators", -> + it "tokenizes ternary operators with namespace resolution", -> + {tokens} = grammar.tokenizeLine('a ? 
ns::b : ns::c') + expect(tokens[0]).toEqual value: 'a ', scopes: ['source.cpp'] + expect(tokens[1]).toEqual value: '?', scopes: ['source.cpp', 'keyword.operator.ternary.c'] + expect(tokens[2]).toEqual value: ' ns', scopes: ['source.cpp'] + expect(tokens[3]).toEqual value: '::', scopes: ['source.cpp', 'punctuation.separator.namespace.access.cpp'] + expect(tokens[4]).toEqual value: 'b ', scopes: ['source.cpp'] + expect(tokens[5]).toEqual value: ':', scopes: ['source.cpp', 'keyword.operator.ternary.c'] + expect(tokens[6]).toEqual value: ' ns', scopes: ['source.cpp'] + expect(tokens[7]).toEqual value: '::', scopes: ['source.cpp', 'punctuation.separator.namespace.access.cpp'] + expect(tokens[8]).toEqual value: 'c', scopes: ['source.cpp'] diff --git a/packages/language-clojure/.coffeelintignore b/packages/language-clojure/.coffeelintignore new file mode 100644 index 000000000..1db51fed7 --- /dev/null +++ b/packages/language-clojure/.coffeelintignore @@ -0,0 +1 @@ +spec/fixtures diff --git a/packages/language-clojure/.github/no-response.yml b/packages/language-clojure/.github/no-response.yml new file mode 100644 index 000000000..1c8799d13 --- /dev/null +++ b/packages/language-clojure/.github/no-response.yml @@ -0,0 +1,15 @@ +# Configuration for probot-no-response - https://github.com/probot/no-response + +# Number of days of inactivity before an issue is closed for lack of response +daysUntilClose: 28 + +# Label requiring a response +responseRequiredLabel: more-information-needed + +# Comment to post when closing an issue for lack of response. Set to `false` to disable. +closeComment: > + This issue has been automatically closed because there has been no response + to our request for more information from the original author. With only the + information that is currently in the issue, we don't have enough information + to take action. Please reach out if you have or find the answers we need so + that we can investigate further. 
diff --git a/packages/language-clojure/.github/workflows/ci.yml b/packages/language-clojure/.github/workflows/ci.yml new file mode 100644 index 000000000..ab77c1f1f --- /dev/null +++ b/packages/language-clojure/.github/workflows/ci.yml @@ -0,0 +1,23 @@ +name: CI + +on: [push] + +env: + CI: true + +jobs: + Test: + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + channel: [stable, beta] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v1 + - uses: UziTech/action-setup-atom@v2 + with: + version: ${{ matrix.channel }} + - name: Install dependencies + run: apm install + - name: Run tests + run: atom --test spec diff --git a/packages/language-clojure/.gitignore b/packages/language-clojure/.gitignore new file mode 100644 index 000000000..3c3629e64 --- /dev/null +++ b/packages/language-clojure/.gitignore @@ -0,0 +1 @@ +node_modules diff --git a/packages/language-clojure/ISSUE_TEMPLATE.md b/packages/language-clojure/ISSUE_TEMPLATE.md new file mode 100644 index 000000000..b60bb86c9 --- /dev/null +++ b/packages/language-clojure/ISSUE_TEMPLATE.md @@ -0,0 +1,40 @@ + + +### Prerequisites + +* [ ] Put an X between the brackets on this line if you have done all of the following: + * Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode + * Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/ + * Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq + * Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom + * Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages + +### Description + +[Description of the issue] + +### Steps to Reproduce + +1. [First Step] +2. [Second Step] +3. [and so on...] + +**Expected behavior:** [What you expect to happen] + +**Actual behavior:** [What actually happens] + +**Reproduces how often:** [What percentage of the time does it reproduce?] + +### Versions + +You can get this information from copy and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running. + +### Additional Information + +Any additional information, configuration or data that might be necessary to reproduce the issue. diff --git a/packages/language-clojure/LICENSE.md b/packages/language-clojure/LICENSE.md new file mode 100644 index 000000000..6c77a82ce --- /dev/null +++ b/packages/language-clojure/LICENSE.md @@ -0,0 +1,48 @@ +Copyright (c) 2014 GitHub Inc. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +-------------------------------------------------------------------- + +This package was derived from a TextMate bundle located at +https://github.com/mmcgrana/textmate-clojure and distributed under the +following license, located in `LICENSE.md`: + +The MIT License (MIT) + +Copyright (c) 2010- Mark McGranaghan + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/packages/language-clojure/PULL_REQUEST_TEMPLATE.md b/packages/language-clojure/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000..cdaa94a86 --- /dev/null +++ b/packages/language-clojure/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,28 @@ +### Requirements + +* Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion. +* All new code requires tests to ensure against regressions + +### Description of the Change + + + +### Alternate Designs + + + +### Benefits + + + +### Possible Drawbacks + + + +### Applicable Issues + + diff --git a/packages/language-clojure/README.md b/packages/language-clojure/README.md new file mode 100644 index 000000000..a4e5a78d2 --- /dev/null +++ b/packages/language-clojure/README.md @@ -0,0 +1,9 @@ +# Clojure language support in Atom +[![macOS Build Status](https://travis-ci.org/atom/language-clojure.svg?branch=master)](https://travis-ci.org/atom/language-clojure) [![Build status](https://ci.appveyor.com/api/projects/status/6kd5fs48y5hixde6/branch/master?svg=true)](https://ci.appveyor.com/project/Atom/language-clojure/branch/master) [![Dependency Status](https://david-dm.org/atom/language-clojure.svg)](https://david-dm.org/atom/language-clojure) + +Adds syntax highlighting to Clojure files in Atom. + +Originally [converted](http://flight-manual.atom.io/hacking-atom/sections/converting-from-textmate) +from the [Clojure TextMate bundle](https://github.com/mmcgrana/textmate-clojure). + +Contributions are greatly appreciated. Please fork this repository and open a pull request to add snippets, make grammar tweaks, etc. 
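Reviewer note: every migrated grammar package is exercised the same way as the C/C++ spec hunks earlier in this patch — the spec looks the grammar up by scope name and asserts on the tokens it produces. A minimal, hypothetical smoke test for the Clojure package (not part of this patch; it only assumes the `source.clojure` scope name and the `punctuation.section.expression.begin.clojure` scope defined in grammars/clojure.cson below) might look like:

    describe "Clojure grammar", ->
      grammar = null

      beforeEach ->
        # Activate the migrated package so its grammar gets registered.
        waitsForPromise ->
          atom.packages.activatePackage('language-clojure')

        runs ->
          grammar = atom.grammars.grammarForScopeName('source.clojure')

      it "parses the grammar", ->
        expect(grammar).toBeTruthy()
        expect(grammar.scopeName).toBe 'source.clojure'

      it "scopes the opening paren of a top-level form", ->
        {tokens} = grammar.tokenizeLine '(defn hello [])'
        expect(tokens[0].value).toBe '('
        expect(tokens[0].scopes).toContain 'punctuation.section.expression.begin.clojure'

The sketch above only illustrates the pattern; the migrated spec files included in this patch cover each grammar in far more depth.
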
diff --git a/packages/language-clojure/coffeelint.json b/packages/language-clojure/coffeelint.json new file mode 100644 index 000000000..a5dd715e3 --- /dev/null +++ b/packages/language-clojure/coffeelint.json @@ -0,0 +1,37 @@ +{ + "max_line_length": { + "level": "ignore" + }, + "no_empty_param_list": { + "level": "error" + }, + "arrow_spacing": { + "level": "error" + }, + "no_interpolation_in_single_quotes": { + "level": "error" + }, + "no_debugger": { + "level": "error" + }, + "prefer_english_operator": { + "level": "error" + }, + "colon_assignment_spacing": { + "spacing": { + "left": 0, + "right": 1 + }, + "level": "error" + }, + "braces_spacing": { + "spaces": 0, + "level": "error" + }, + "spacing_after_comma": { + "level": "error" + }, + "no_stand_alone_at": { + "level": "error" + } +} diff --git a/packages/language-clojure/grammars/clojure.cson b/packages/language-clojure/grammars/clojure.cson new file mode 100644 index 000000000..53b38df46 --- /dev/null +++ b/packages/language-clojure/grammars/clojure.cson @@ -0,0 +1,398 @@ +'scopeName': 'source.clojure' +'fileTypes': [ + 'boot' + 'clj' + 'clj.hl' + 'cljc' + 'cljs' + 'cljs.hl' + 'cljx' + 'clojure' + 'edn' + 'org' + 'joke' + 'joker' +] +'foldingStartMarker': '\\(\\s*$' +'foldingStopMarker': '^\\s*\\)' +'firstLineMatch': '''(?x) + # Hashbang + ^\\#!.*(?:\\s|\\/) + boot + (?:$|\\s) + | + # Modeline + (?i: + # Emacs + -\\*-(?:\\s*(?=[^:;\\s]+\\s*-\\*-)|(?:.*?[;\\s]|(?<=-\\*-))mode\\s*:\\s*) + clojure(script)? + (?=[\\s;]|(?]?\\d+|m)?|\\sex)(?=:(?=\\s*set?\\s[^\\n:]+:)|:(?!\\s*set?\\s))(?:(?:\\s|\\s*:\\s*)\\w*(?:\\s*=(?:[^\\n\\\\\\s]|\\\\.)*)?)*[\\s:](?:filetype|ft|syntax)\\s*= + clojure + (?=\\s|:|$) + ) +''' +'name': 'Clojure' +'patterns': [ + { + 'include': '#comment' + } + { + 'include': '#shebang-comment' + } + { + 'include': '#quoted-sexp' + } + { + 'include': '#sexp' + } + { + 'include': '#keyfn' + } + { + 'include': '#string' + } + { + 'include': '#vector' + } + { + 'include': '#set' + } + { + 'include': '#map' + } + { + 'include': '#regexp' + } + { + 'include': '#var' + } + { + 'include': '#constants' + } + { + 'include': '#dynamic-variables' + } + { + 'include': '#metadata' + } + { + 'include': '#namespace-symbol' + } + { + 'include': '#symbol' + } +] +'repository': + 'comment': + # NOTE: This must be kept as a begin/end match for language-todo to work + 'begin': '(?\\<\\/\\!\\?\\*]+(?=(\\s|\\)|\\]|\\}|\\,))' + 'name': 'constant.keyword.clojure' + 'keyfn': + 'patterns': [ + { + 'match': '(?<=(\\s|\\(|\\[|\\{))(if(-[-\\p{Ll}\\?]*)?|when(-[-\\p{Ll}]*)?|for(-[-\\p{Ll}]*)?|cond|do|let(-[-\\p{Ll}\\?]*)?|binding|loop|recur|fn|throw[\\p{Ll}\\-]*|try|catch|finally|([\\p{Ll}]*case))(?=(\\s|\\)|\\]|\\}))' + 'name': 'storage.control.clojure' + } + { + 'match': '(?<=(\\s|\\(|\\[|\\{))(declare-?|(in-)?ns|import|use|require|load|compile|(def[\\p{Ll}\\-]*))(?=(\\s|\\)|\\]|\\}))' + 'name': 'keyword.control.clojure' + } + ] + 'dynamic-variables': + 'match': '\\*[\\w\\.\\-\\_\\:\\+\\=\\>\\<\\!\\?\\d]+\\*' + 'name': 'meta.symbol.dynamic.clojure' + 'map': + 'begin': '(\\{)' + 'beginCaptures': + '1': + 'name': 'punctuation.section.map.begin.clojure' + 'end': '(\\}(?=[\\}\\]\\)\\s]*(?:;|$)))|(\\})' + 'endCaptures': + '1': + 'name': 'punctuation.section.map.end.trailing.clojure' + '2': + 'name': 'punctuation.section.map.end.clojure' + 'name': 'meta.map.clojure' + 'patterns': [ + { + 'include': '$self' + } + ] + 'metadata': + 'patterns': [ + { + 'begin': '(\\^\\{)' + 'beginCaptures': + '1': + 'name': 'punctuation.section.metadata.map.begin.clojure' + 
'end': '(\\}(?=[\\}\\]\\)\\s]*(?:;|$)))|(\\})' + 'endCaptures': + '1': + 'name': 'punctuation.section.metadata.map.end.trailing.clojure' + '2': + 'name': 'punctuation.section.metadata.map.end.clojure' + 'name': 'meta.metadata.map.clojure' + 'patterns': [ + { + 'include': '$self' + } + ] + } + { + 'begin': '(\\^)' + 'end': '(\\s)' + 'name': 'meta.metadata.simple.clojure' + 'patterns': [ + { + 'include': '#keyword' + } + { + 'include': '$self' + } + ] + } + ] + 'quoted-sexp': + 'begin': '([\'``]\\()' + 'beginCaptures': + '1': + 'name': 'punctuation.section.expression.begin.clojure' + 'end': '(\\))$|(\\)(?=[\\}\\]\\)\\s]*(?:;|$)))|(\\))' + 'endCaptures': + '1': + 'name': 'punctuation.section.expression.end.trailing.clojure' + '2': + 'name': 'punctuation.section.expression.end.trailing.clojure' + '3': + 'name': 'punctuation.section.expression.end.clojure' + 'name': 'meta.quoted-expression.clojure' + 'patterns': [ + { + 'include': '$self' + } + ] + 'regexp': + 'begin': '#"' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.regexp.begin.clojure' + 'end': '"' + 'endCaptures': + '0': + 'name': 'punctuation.definition.regexp.end.clojure' + 'name': 'string.regexp.clojure' + 'patterns': [ + { + 'include': '#regexp_escaped_char' + } + ] + 'regexp_escaped_char': + 'match': '\\\\.' + 'name': 'constant.character.escape.clojure' + 'set': + 'begin': '(\\#\\{)' + 'beginCaptures': + '1': + 'name': 'punctuation.section.set.begin.clojure' + 'end': '(\\}(?=[\\}\\]\\)\\s]*(?:;|$)))|(\\})' + 'endCaptures': + '1': + 'name': 'punctuation.section.set.end.trailing.clojure' + '2': + 'name': 'punctuation.section.set.end.clojure' + 'name': 'meta.set.clojure' + 'patterns': [ + { + 'include': '$self' + } + ] + 'sexp': + 'begin': '(\\()' + 'beginCaptures': + '1': + 'name': 'punctuation.section.expression.begin.clojure' + 'end': '(\\))$|(\\)(?=[\\}\\]\\)\\s]*(?:;|$)))|(\\))' + 'endCaptures': + '1': + 'name': 'punctuation.section.expression.end.trailing.clojure' + '2': + 'name': 'punctuation.section.expression.end.trailing.clojure' + '3': + 'name': 'punctuation.section.expression.end.clojure' + 'name': 'meta.expression.clojure' + 'patterns': [ + { + # ns, declare and everything that starts with def* or namespace/def* + 'begin': '(?<=\\()(ns|declare|def[\\w\\d._:+=>\\<\\!\\?\\*][\\w\\.\\-\\_\\:\\+\\=\\>\\<\\!\\?\\*\\d]*)' + 'name': 'entity.global.clojure' + } + { + 'include': '$self' + } + ] + } + { + 'include': '#keyfn' + } + { + 'include': '#constants' + } + { + 'include': '#vector' + } + { + 'include': '#map' + } + { + 'include': '#set' + } + { + 'include': '#sexp' + } + { + 'match': '(?<=\\()(.+?)(?=\\s|\\))' + 'captures': + '1': + 'name': 'entity.name.function.clojure' + 'patterns': [ + { + 'include': '$self' + } + ] + } + { + 'include': '$self' + } + ] + 'shebang-comment': + # NOTE: This must be kept as a begin/end match for language-todo to work + 'begin': '^(#!)' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.comment.shebang.clojure' + 'end': '$' + 'name': 'comment.line.shebang.clojure' + 'string': + 'begin': '(?\\<\\!\\?\\*][\\w\\.\\-\\_\\:\\+\\=\\>\\<\\!\\?\\*\\d]*)/' + 'captures': + '1': + 'name': 'meta.symbol.namespace.clojure' + } + ] + 'symbol': + 'patterns': [ + { + 'match': '([\\p{L}\\.\\-\\_\\+\\=\\>\\<\\!\\?\\*][\\w\\.\\-\\_\\:\\+\\=\\>\\<\\!\\?\\*\\d]*)' + 'name': 'meta.symbol.clojure' + } + ] + 'var': + 'match': '(?<=(\\s|\\(|\\[|\\{)\\#)\'[\\w\\.\\-\\_\\:\\+\\=\\>\\<\\/\\!\\?\\*]+(?=(\\s|\\)|\\]|\\}))' + 'name': 'meta.var.clojure' + 'vector': + 'begin': '(\\[)' + 'beginCaptures': + 
'1': + 'name': 'punctuation.section.vector.begin.clojure' + 'end': '(\\](?=[\\}\\]\\)\\s]*(?:;|$)))|(\\])' + 'endCaptures': + '1': + 'name': 'punctuation.section.vector.end.trailing.clojure' + '2': + 'name': 'punctuation.section.vector.end.clojure' + 'name': 'meta.vector.clojure' + 'patterns': [ + { + 'include': '$self' + } + ] diff --git a/packages/language-clojure/package.json b/packages/language-clojure/package.json new file mode 100644 index 000000000..6bfed157b --- /dev/null +++ b/packages/language-clojure/package.json @@ -0,0 +1,21 @@ +{ + "name": "language-clojure", + "version": "0.22.8", + "description": "Clojure language support in Atom", + "engines": { + "atom": "*", + "node": "*" + }, + "homepage": "http://atom.github.io/language-clojure", + "repository": { + "type": "git", + "url": "https://github.com/atom/language-clojure" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/atom/language-clojure/issues" + }, + "devDependencies": { + "coffeelint": "^1.10.1" + } +} diff --git a/packages/language-clojure/settings/language-clojure.cson b/packages/language-clojure/settings/language-clojure.cson new file mode 100644 index 000000000..d0dd7180e --- /dev/null +++ b/packages/language-clojure/settings/language-clojure.cson @@ -0,0 +1,5 @@ +'.source.clojure': + 'editor': + 'commentStart': '; ' + 'autocomplete': + 'extraWordCharacters': '-' diff --git a/packages/language-clojure/snippets/language-clojure.cson b/packages/language-clojure/snippets/language-clojure.cson new file mode 100644 index 000000000..4b56104cf --- /dev/null +++ b/packages/language-clojure/snippets/language-clojure.cson @@ -0,0 +1,111 @@ +'.source.clojure': + 'ns': + 'prefix': 'ns' + 'body': """ + (ns ${1:name} + (:require [${2:libraries}])) + $0 + """ + + 'def': + 'prefix': 'def' + 'body': '(def ${1:symbol} ${2:value})' + + 'defn': + 'prefix': 'defn' + 'body': """ + (defn ${1:name} + [${2:params}] + ${3:body}) + """ + + 'fn': + 'prefix': 'fn' + 'body': """ + (fn [${1:params}] + ${2:body})$0 + """ + + 'let': + 'prefix': 'let' + 'body': """ + (let [${1:bindings}] + ${2:body}) + """ + + 'if': + 'prefix': 'if' + 'body': """ + (if ${1:test} + ${2:then} + ${3:else}) + """ + + 'if-let': + 'prefix': 'ifl' + 'body': """ + (if-let [${1:bindings}] + ${2:then} + ${3:else}) + """ + + 'if-not': + 'prefix': 'ifn' + 'body': """ + (if-not ${1:test} + ${2:then} + ${3:else}) + """ + + 'when': + 'prefix': 'when' + 'body': """ + (when ${1:test} + ${2:body}) + """ + + 'when-let': + 'prefix': 'whenl' + 'body': """ + (when-let [${1:bindings}] + ${2:body}) + """ + + 'when-not': + 'prefix': 'whenn' + 'body': """ + (when-not ${1:test} + ${2:body}) + """ + + 'map': + 'prefix': 'map' + 'body': '(map $1 $2)' + + 'map lambda': + 'prefix': 'mapl' + 'body': '(map #($1) $2)' + + 'condp': + 'prefix': 'condp' + 'body': """ + (condp ${1:pred} ${2:expr} + $0) + """ + + 'try': + 'prefix': 'try' + 'body': """ + (try + $1 + (catch ${2:exception} e + $3)) + """ + + 'prn': + 'prefix': 'prn' + 'body': '(prn $1)' + + 'println': + 'prefix': 'prnl' + 'body': '(println $1)' diff --git a/packages/language-clojure/spec/clojure-spec.coffee b/packages/language-clojure/spec/clojure-spec.coffee new file mode 100644 index 000000000..802858c8d --- /dev/null +++ b/packages/language-clojure/spec/clojure-spec.coffee @@ -0,0 +1,391 @@ +describe "Clojure grammar", -> + grammar = null + + beforeEach -> + waitsForPromise -> + atom.packages.activatePackage("language-clojure") + + runs -> + grammar = atom.grammars.grammarForScopeName("source.clojure") + + it "parses 
the grammar", -> + expect(grammar).toBeDefined() + expect(grammar.scopeName).toBe "source.clojure" + + it "tokenizes semicolon comments", -> + {tokens} = grammar.tokenizeLine "; clojure" + expect(tokens[0]).toEqual value: ";", scopes: ["source.clojure", "comment.line.semicolon.clojure", "punctuation.definition.comment.clojure"] + expect(tokens[1]).toEqual value: " clojure", scopes: ["source.clojure", "comment.line.semicolon.clojure"] + + it "does not tokenize escaped semicolons as comments", -> + {tokens} = grammar.tokenizeLine "\\; clojure" + expect(tokens[0]).toEqual value: "\\; ", scopes: ["source.clojure"] + expect(tokens[1]).toEqual value: "clojure", scopes: ["source.clojure", "meta.symbol.clojure"] + + it "tokenizes shebang comments", -> + {tokens} = grammar.tokenizeLine "#!/usr/bin/env clojure" + expect(tokens[0]).toEqual value: "#!", scopes: ["source.clojure", "comment.line.shebang.clojure", "punctuation.definition.comment.shebang.clojure"] + expect(tokens[1]).toEqual value: "/usr/bin/env clojure", scopes: ["source.clojure", "comment.line.shebang.clojure"] + + it "tokenizes strings", -> + {tokens} = grammar.tokenizeLine '"foo bar"' + expect(tokens[0]).toEqual value: '"', scopes: ["source.clojure", "string.quoted.double.clojure", "punctuation.definition.string.begin.clojure"] + expect(tokens[1]).toEqual value: 'foo bar', scopes: ["source.clojure", "string.quoted.double.clojure"] + expect(tokens[2]).toEqual value: '"', scopes: ["source.clojure", "string.quoted.double.clojure", "punctuation.definition.string.end.clojure"] + + it "tokenizes character escape sequences", -> + {tokens} = grammar.tokenizeLine '"\\n"' + expect(tokens[0]).toEqual value: '"', scopes: ["source.clojure", "string.quoted.double.clojure", "punctuation.definition.string.begin.clojure"] + expect(tokens[1]).toEqual value: '\\n', scopes: ["source.clojure", "string.quoted.double.clojure", "constant.character.escape.clojure"] + expect(tokens[2]).toEqual value: '"', scopes: ["source.clojure", "string.quoted.double.clojure", "punctuation.definition.string.end.clojure"] + + it "tokenizes regexes", -> + {tokens} = grammar.tokenizeLine '#"foo"' + expect(tokens[0]).toEqual value: '#"', scopes: ["source.clojure", "string.regexp.clojure", "punctuation.definition.regexp.begin.clojure"] + expect(tokens[1]).toEqual value: 'foo', scopes: ["source.clojure", "string.regexp.clojure"] + expect(tokens[2]).toEqual value: '"', scopes: ["source.clojure", "string.regexp.clojure", "punctuation.definition.regexp.end.clojure"] + + it "tokenizes backslash escape character in regexes", -> + {tokens} = grammar.tokenizeLine '#"\\\\" "/"' + expect(tokens[0]).toEqual value: '#"', scopes: ["source.clojure", "string.regexp.clojure", "punctuation.definition.regexp.begin.clojure"] + expect(tokens[1]).toEqual value: "\\\\", scopes: ['source.clojure', 'string.regexp.clojure', 'constant.character.escape.clojure'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.clojure', 'string.regexp.clojure', "punctuation.definition.regexp.end.clojure"] + expect(tokens[4]).toEqual value: '"', scopes: ['source.clojure', 'string.quoted.double.clojure', 'punctuation.definition.string.begin.clojure'] + expect(tokens[5]).toEqual value: "/", scopes: ['source.clojure', 'string.quoted.double.clojure'] + expect(tokens[6]).toEqual value: '"', scopes: ['source.clojure', 'string.quoted.double.clojure', 'punctuation.definition.string.end.clojure'] + + it "tokenizes escaped double quote in regexes", -> + {tokens} = grammar.tokenizeLine '#"\\""' + expect(tokens[0]).toEqual 
value: '#"', scopes: ["source.clojure", "string.regexp.clojure", "punctuation.definition.regexp.begin.clojure"] + expect(tokens[1]).toEqual value: '\\"', scopes: ['source.clojure', 'string.regexp.clojure', 'constant.character.escape.clojure'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.clojure', 'string.regexp.clojure', "punctuation.definition.regexp.end.clojure"] + + it "tokenizes numerics", -> + numbers = + "constant.numeric.ratio.clojure": ["1/2", "123/456", "+0/2", "-23/1"] + "constant.numeric.arbitrary-radix.clojure": ["2R1011", "16rDEADBEEF", "16rDEADBEEFN", "36rZebra"] + "constant.numeric.hexadecimal.clojure": ["0xDEADBEEF", "0XDEADBEEF", "0xDEADBEEFN", "0x0"] + "constant.numeric.octal.clojure": ["0123", "0123N", "00"] + "constant.numeric.double.clojure": ["123.45", "123.45e6", "123.45E6", "123.456M", "42.", "42.M", "42E+9M", "42E-0", "0M", "+0M", "42.E-23M"] + "constant.numeric.long.clojure": ["123", "12321", "123N", "+123N", "-123", "0"] + "constant.numeric.symbol.clojure": ["##Inf", "##-Inf", "##NaN"] + + for scope, nums of numbers + for num in nums + {tokens} = grammar.tokenizeLine num + expect(tokens[0]).toEqual value: num, scopes: ["source.clojure", scope] + + it "tokenizes booleans", -> + booleans = + "constant.language.boolean.clojure": ["true", "false"] + + for scope, bools of booleans + for bool in bools + {tokens} = grammar.tokenizeLine bool + expect(tokens[0]).toEqual value: bool, scopes: ["source.clojure", scope] + + it "tokenizes nil", -> + {tokens} = grammar.tokenizeLine "nil" + expect(tokens[0]).toEqual value: "nil", scopes: ["source.clojure", "constant.language.nil.clojure"] + + it "tokenizes keywords", -> + tests = + "meta.expression.clojure": ["(:foo)"] + "meta.map.clojure": ["{:foo}"] + "meta.vector.clojure": ["[:foo]"] + "meta.quoted-expression.clojure": ["'(:foo)", "`(:foo)"] + + for metaScope, lines of tests + for line in lines + {tokens} = grammar.tokenizeLine line + expect(tokens[1]).toEqual value: ":foo", scopes: ["source.clojure", metaScope, "constant.keyword.clojure"] + + {tokens} = grammar.tokenizeLine "(def foo :bar)" + expect(tokens[5]).toEqual value: ":bar", scopes: ["source.clojure", "meta.expression.clojure", "meta.definition.global.clojure", "constant.keyword.clojure"] + + # keywords can start with an uppercase non-ASCII letter + {tokens} = grammar.tokenizeLine "(def foo :Öπ)" + expect(tokens[5]).toEqual value: ":Öπ", scopes: ["source.clojure", "meta.expression.clojure", "meta.definition.global.clojure", "constant.keyword.clojure"] + + it "tokenizes keyfns (keyword control)", -> + keyfns = ["declare", "declare-", "ns", "in-ns", "import", "use", "require", "load", "compile", "def", "defn", "defn-", "defmacro", "defåπç"] + + for keyfn in keyfns + {tokens} = grammar.tokenizeLine "(#{keyfn})" + expect(tokens[1]).toEqual value: keyfn, scopes: ["source.clojure", "meta.expression.clojure", "keyword.control.clojure"] + + it "tokenizes keyfns (storage control)", -> + keyfns = ["if", "when", "for", "cond", "do", "let", "binding", "loop", "recur", "fn", "throw", "try", "catch", "finally", "case"] + + for keyfn in keyfns + {tokens} = grammar.tokenizeLine "(#{keyfn})" + expect(tokens[1]).toEqual value: keyfn, scopes: ["source.clojure", "meta.expression.clojure", "storage.control.clojure"] + + it "tokenizes global definitions", -> + macros = ["ns", "declare", "def", "defn", "defn-", "defroutes", "compojure/defroutes", "rum.core/defc123-", "some.nested-ns/def-nested->symbol!?*", "def+!.?abc8:<>", "ns/def+!.?abc8:<>", "ns/defåÄÖπç"] + + for macro in 
macros + {tokens} = grammar.tokenizeLine "(#{macro} foo 'bar)" + expect(tokens[1]).toEqual value: macro, scopes: ["source.clojure", "meta.expression.clojure", "meta.definition.global.clojure", "keyword.control.clojure"] + expect(tokens[3]).toEqual value: "foo", scopes: ["source.clojure", "meta.expression.clojure", "meta.definition.global.clojure", "entity.global.clojure"] + + it "tokenizes dynamic variables", -> + mutables = ["*ns*", "*foo-bar*", "*åÄÖπç*"] + + for mutable in mutables + {tokens} = grammar.tokenizeLine mutable + expect(tokens[0]).toEqual value: mutable, scopes: ["source.clojure", "meta.symbol.dynamic.clojure"] + + it "tokenizes metadata", -> + {tokens} = grammar.tokenizeLine "^Foo" + expect(tokens[0]).toEqual value: "^", scopes: ["source.clojure", "meta.metadata.simple.clojure"] + expect(tokens[1]).toEqual value: "Foo", scopes: ["source.clojure", "meta.metadata.simple.clojure", "meta.symbol.clojure"] + + # non-ASCII letters + {tokens} = grammar.tokenizeLine "^Öπ" + expect(tokens[0]).toEqual value: "^", scopes: ["source.clojure", "meta.metadata.simple.clojure"] + expect(tokens[1]).toEqual value: "Öπ", scopes: ["source.clojure", "meta.metadata.simple.clojure", "meta.symbol.clojure"] + + {tokens} = grammar.tokenizeLine "^{:foo true}" + expect(tokens[0]).toEqual value: "^{", scopes: ["source.clojure", "meta.metadata.map.clojure", "punctuation.section.metadata.map.begin.clojure"] + expect(tokens[1]).toEqual value: ":foo", scopes: ["source.clojure", "meta.metadata.map.clojure", "constant.keyword.clojure"] + expect(tokens[2]).toEqual value: " ", scopes: ["source.clojure", "meta.metadata.map.clojure"] + expect(tokens[3]).toEqual value: "true", scopes: ["source.clojure", "meta.metadata.map.clojure", "constant.language.boolean.clojure"] + expect(tokens[4]).toEqual value: "}", scopes: ["source.clojure", "meta.metadata.map.clojure", "punctuation.section.metadata.map.end.trailing.clojure"] + + it "tokenizes functions", -> + expressions = ["(foo)", "(foo 1 10)"] + + for expr in expressions + {tokens} = grammar.tokenizeLine expr + expect(tokens[1]).toEqual value: "foo", scopes: ["source.clojure", "meta.expression.clojure", "entity.name.function.clojure"] + + #non-ASCII letters + {tokens} = grammar.tokenizeLine "(Öπ 2 20)" + expect(tokens[1]).toEqual value: "Öπ", scopes: ["source.clojure", "meta.expression.clojure", "entity.name.function.clojure"] + + it "tokenizes vars", -> + {tokens} = grammar.tokenizeLine "(func #'foo)" + expect(tokens[2]).toEqual value: " #", scopes: ["source.clojure", "meta.expression.clojure"] + expect(tokens[3]).toEqual value: "'foo", scopes: ["source.clojure", "meta.expression.clojure", "meta.var.clojure"] + + # non-ASCII letters + {tokens} = grammar.tokenizeLine "(func #'Öπ)" + expect(tokens[2]).toEqual value: " #", scopes: ["source.clojure", "meta.expression.clojure"] + expect(tokens[3]).toEqual value: "'Öπ", scopes: ["source.clojure", "meta.expression.clojure", "meta.var.clojure"] + + it "tokenizes symbols", -> + {tokens} = grammar.tokenizeLine "x" + expect(tokens[0]).toEqual value: "x", scopes: ["source.clojure", "meta.symbol.clojure"] + + # non-ASCII letters + {tokens} = grammar.tokenizeLine "Öπ" + expect(tokens[0]).toEqual value: "Öπ", scopes: ["source.clojure", "meta.symbol.clojure"] + + # Should not be tokenized as a symbol + {tokens} = grammar.tokenizeLine "1foobar" + expect(tokens[0]).toEqual value: "1", scopes: ["source.clojure", "constant.numeric.long.clojure"] + + it "tokenizes namespaces", -> + {tokens} = grammar.tokenizeLine "foo/bar" + 
expect(tokens[0]).toEqual value: "foo", scopes: ["source.clojure", "meta.symbol.namespace.clojure"] + expect(tokens[1]).toEqual value: "/", scopes: ["source.clojure"] + expect(tokens[2]).toEqual value: "bar", scopes: ["source.clojure", "meta.symbol.clojure"] + + # non-ASCII letters + {tokens} = grammar.tokenizeLine "Öπ/Åä" + expect(tokens[0]).toEqual value: "Öπ", scopes: ["source.clojure", "meta.symbol.namespace.clojure"] + expect(tokens[1]).toEqual value: "/", scopes: ["source.clojure"] + expect(tokens[2]).toEqual value: "Åä", scopes: ["source.clojure", "meta.symbol.clojure"] + + testMetaSection = (metaScope, puncScope, startsWith, endsWith) -> + # Entire expression on one line. + {tokens} = grammar.tokenizeLine "#{startsWith}foo, bar#{endsWith}" + + [start, mid..., end] = tokens + + expect(start).toEqual value: startsWith, scopes: ["source.clojure", "meta.#{metaScope}.clojure", "punctuation.section.#{puncScope}.begin.clojure"] + expect(end).toEqual value: endsWith, scopes: ["source.clojure", "meta.#{metaScope}.clojure", "punctuation.section.#{puncScope}.end.trailing.clojure"] + + for token in mid + expect(token.scopes.slice(0, 2)).toEqual ["source.clojure", "meta.#{metaScope}.clojure"] + + # Expression broken over multiple lines. + tokens = grammar.tokenizeLines("#{startsWith}foo\n bar#{endsWith}") + + [start, mid..., after] = tokens[0] + + expect(start).toEqual value: startsWith, scopes: ["source.clojure", "meta.#{metaScope}.clojure", "punctuation.section.#{puncScope}.begin.clojure"] + + for token in mid + expect(token.scopes.slice(0, 2)).toEqual ["source.clojure", "meta.#{metaScope}.clojure"] + + [mid..., end] = tokens[1] + + expect(end).toEqual value: endsWith, scopes: ["source.clojure", "meta.#{metaScope}.clojure", "punctuation.section.#{puncScope}.end.trailing.clojure"] + + for token in mid + expect(token.scopes.slice(0, 2)).toEqual ["source.clojure", "meta.#{metaScope}.clojure"] + + it "tokenizes expressions", -> + testMetaSection "expression", "expression", "(", ")" + + it "tokenizes quoted expressions", -> + testMetaSection "quoted-expression", "expression", "'(", ")" + testMetaSection "quoted-expression", "expression", "`(", ")" + + it "tokenizes vectors", -> + testMetaSection "vector", "vector", "[", "]" + + it "tokenizes maps", -> + testMetaSection "map", "map", "{", "}" + + it "tokenizes sets", -> + testMetaSection "set", "set", "\#{", "}" + + it "tokenizes functions in nested sexp", -> + {tokens} = grammar.tokenizeLine "((foo bar) baz)" + expect(tokens[0]).toEqual value: "(", scopes: ["source.clojure", "meta.expression.clojure", "punctuation.section.expression.begin.clojure"] + expect(tokens[1]).toEqual value: "(", scopes: ["source.clojure", "meta.expression.clojure", "meta.expression.clojure", "punctuation.section.expression.begin.clojure"] + expect(tokens[2]).toEqual value: "foo", scopes: ["source.clojure", "meta.expression.clojure", "meta.expression.clojure", "entity.name.function.clojure"] + expect(tokens[3]).toEqual value: " ", scopes: ["source.clojure", "meta.expression.clojure", "meta.expression.clojure"] + expect(tokens[4]).toEqual value: "bar", scopes: ["source.clojure", "meta.expression.clojure", "meta.expression.clojure", "meta.symbol.clojure"] + expect(tokens[5]).toEqual value: ")", scopes: ["source.clojure", "meta.expression.clojure", "meta.expression.clojure", "punctuation.section.expression.end.clojure"] + expect(tokens[6]).toEqual value: " ", scopes: ["source.clojure", "meta.expression.clojure"] + expect(tokens[7]).toEqual value: "baz", scopes: 
["source.clojure", "meta.expression.clojure", "meta.symbol.clojure"] + expect(tokens[8]).toEqual value: ")", scopes: ["source.clojure", "meta.expression.clojure", "punctuation.section.expression.end.trailing.clojure"] + + it "tokenizes maps used as functions", -> + {tokens} = grammar.tokenizeLine "({:foo bar} :foo)" + expect(tokens[0]).toEqual value: "(", scopes: ["source.clojure", "meta.expression.clojure", "punctuation.section.expression.begin.clojure"] + expect(tokens[1]).toEqual value: "{", scopes: ["source.clojure", "meta.expression.clojure", "meta.map.clojure", "punctuation.section.map.begin.clojure"] + expect(tokens[2]).toEqual value: ":foo", scopes: ["source.clojure", "meta.expression.clojure", "meta.map.clojure", "constant.keyword.clojure"] + expect(tokens[3]).toEqual value: " ", scopes: ["source.clojure", "meta.expression.clojure", "meta.map.clojure"] + expect(tokens[4]).toEqual value: "bar", scopes: ["source.clojure", "meta.expression.clojure", "meta.map.clojure", "meta.symbol.clojure"] + expect(tokens[5]).toEqual value: "}", scopes: ["source.clojure", "meta.expression.clojure", "meta.map.clojure", "punctuation.section.map.end.clojure"] + expect(tokens[6]).toEqual value: " ", scopes: ["source.clojure", "meta.expression.clojure"] + expect(tokens[7]).toEqual value: ":foo", scopes: ["source.clojure", "meta.expression.clojure", "constant.keyword.clojure"] + expect(tokens[8]).toEqual value: ")", scopes: ["source.clojure", "meta.expression.clojure", "punctuation.section.expression.end.trailing.clojure"] + + it "tokenizes sets used in functions", -> + {tokens} = grammar.tokenizeLine "(\#{:foo :bar})" + expect(tokens[0]).toEqual value: "(", scopes: ["source.clojure", "meta.expression.clojure", "punctuation.section.expression.begin.clojure"] + expect(tokens[1]).toEqual value: "\#{", scopes: ["source.clojure", "meta.expression.clojure", "meta.set.clojure", "punctuation.section.set.begin.clojure"] + expect(tokens[2]).toEqual value: ":foo", scopes: ["source.clojure", "meta.expression.clojure", "meta.set.clojure", "constant.keyword.clojure"] + expect(tokens[3]).toEqual value: " ", scopes: ["source.clojure", "meta.expression.clojure", "meta.set.clojure"] + expect(tokens[4]).toEqual value: ":bar", scopes: ["source.clojure", "meta.expression.clojure", "meta.set.clojure", "constant.keyword.clojure"] + expect(tokens[5]).toEqual value: "}", scopes: ["source.clojure", "meta.expression.clojure", "meta.set.clojure", "punctuation.section.set.end.trailing.clojure"] + expect(tokens[6]).toEqual value: ")", scopes: ["source.clojure", "meta.expression.clojure", "punctuation.section.expression.end.trailing.clojure"] + + describe "firstLineMatch", -> + it "recognises interpreter directives", -> + valid = """ + #!/usr/sbin/boot foo + #!/usr/bin/boot foo=bar/ + #!/usr/sbin/boot + #!/usr/sbin/boot foo bar baz + #!/usr/bin/boot perl + #!/usr/bin/boot bin/perl + #!/usr/bin/boot + #!/bin/boot + #!/usr/bin/boot --script=usr/bin + #! /usr/bin/env A=003 B=149 C=150 D=xzd E=base64 F=tar G=gz H=head I=tail boot + #!\t/usr/bin/env --foo=bar boot --quu=quux + #! /usr/bin/boot + #!/usr/bin/env boot + """ + for line in valid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull() + + invalid = """ + \x20#!/usr/sbin/boot + \t#!/usr/sbin/boot + #!/usr/bin/env-boot/node-env/ + #!/usr/bin/das-boot + #! 
/usr/binboot + #!\t/usr/bin/env --boot=bar + """ + for line in invalid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull() + + it "recognises Emacs modelines", -> + valid = """ + #-*- Clojure -*- + #-*- mode: ClojureScript -*- + /* -*-clojureScript-*- */ + // -*- Clojure -*- + /* -*- mode:Clojure -*- */ + // -*- font:bar;mode:Clojure -*- + // -*- font:bar;mode:Clojure;foo:bar; -*- + // -*-font:mode;mode:Clojure-*- + // -*- foo:bar mode: clojureSCRIPT bar:baz -*- + " -*-foo:bar;mode:clojure;bar:foo-*- "; + " -*-font-mode:foo;mode:clojure;foo-bar:quux-*-" + "-*-font:x;foo:bar; mode : clojure; bar:foo;foooooo:baaaaar;fo:ba;-*-"; + "-*- font:x;foo : bar ; mode : ClojureScript ; bar : foo ; foooooo:baaaaar;fo:ba-*-"; + """ + for line in valid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull() + + invalid = """ + /* --*clojure-*- */ + /* -*-- clojure -*- + /* -*- -- Clojure -*- + /* -*- Clojure -;- -*- + // -*- iClojure -*- + // -*- Clojure; -*- + // -*- clojure-door -*- + /* -*- model:clojure -*- + /* -*- indent-mode:clojure -*- + // -*- font:mode;Clojure -*- + // -*- mode: -*- Clojure + // -*- mode: das-clojure -*- + // -*-font:mode;mode:clojure--*- + """ + for line in invalid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull() + + it "recognises Vim modelines", -> + valid = """ + vim: se filetype=clojure: + # vim: se ft=clojure: + # vim: set ft=Clojure: + # vim: set filetype=Clojure: + # vim: ft=Clojure + # vim: syntax=Clojure + # vim: se syntax=Clojure: + # ex: syntax=Clojure + # vim:ft=clojure + # vim600: ft=clojure + # vim>600: set ft=clojure: + # vi:noai:sw=3 ts=6 ft=clojure + # vi::::::::::noai:::::::::::: ft=clojure + # vim:ts=4:sts=4:sw=4:noexpandtab:ft=clojure + # vi:: noai : : : : sw =3 ts =6 ft =clojure + # vim: ts=4: pi sts=4: ft=clojure: noexpandtab: sw=4: + # vim: ts=4 sts=4: ft=clojure noexpandtab: + # vim:noexpandtab sts=4 ft=clojure ts=4 + # vim:noexpandtab:ft=clojure + # vim:ts=4:sts=4 ft=clojure:noexpandtab:\x20 + # vim:noexpandtab titlestring=hi\|there\\\\ ft=clojure ts=4 + """ + for line in valid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull() + + invalid = """ + ex: se filetype=clojure: + _vi: se filetype=clojure: + vi: se filetype=clojure + # vim set ft=klojure + # vim: soft=clojure + # vim: clean-syntax=clojure: + # vim set ft=clojure: + # vim: setft=clojure: + # vim: se ft=clojure backupdir=tmp + # vim: set ft=clojure set cmdheight=1 + # vim:noexpandtab sts:4 ft:clojure ts:4 + # vim:noexpandtab titlestring=hi\\|there\\ ft=clojure ts=4 + # vim:noexpandtab titlestring=hi\\|there\\\\\\ ft=clojure ts=4 + """ + for line in invalid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull() diff --git a/packages/language-coffee-script/.github/no-response.yml b/packages/language-coffee-script/.github/no-response.yml new file mode 100644 index 000000000..1c8799d13 --- /dev/null +++ b/packages/language-coffee-script/.github/no-response.yml @@ -0,0 +1,15 @@ +# Configuration for probot-no-response - https://github.com/probot/no-response + +# Number of days of inactivity before an issue is closed for lack of response +daysUntilClose: 28 + +# Label requiring a response +responseRequiredLabel: more-information-needed + +# Comment to post when closing an issue for lack of response. Set to `false` to disable. 
+closeComment: > + This issue has been automatically closed because there has been no response + to our request for more information from the original author. With only the + information that is currently in the issue, we don't have enough information + to take action. Please reach out if you have or find the answers we need so + that we can investigate further. diff --git a/packages/language-coffee-script/.github/workflows/ci.yml b/packages/language-coffee-script/.github/workflows/ci.yml new file mode 100644 index 000000000..ab77c1f1f --- /dev/null +++ b/packages/language-coffee-script/.github/workflows/ci.yml @@ -0,0 +1,23 @@ +name: CI + +on: [push] + +env: + CI: true + +jobs: + Test: + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + channel: [stable, beta] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v1 + - uses: UziTech/action-setup-atom@v2 + with: + version: ${{ matrix.channel }} + - name: Install dependencies + run: apm install + - name: Run tests + run: atom --test spec diff --git a/packages/language-coffee-script/.gitignore b/packages/language-coffee-script/.gitignore new file mode 100644 index 000000000..1f792438f --- /dev/null +++ b/packages/language-coffee-script/.gitignore @@ -0,0 +1,2 @@ +*.cache +node_modules diff --git a/packages/language-coffee-script/CONTRIBUTING.md b/packages/language-coffee-script/CONTRIBUTING.md new file mode 100644 index 000000000..0fd0ad696 --- /dev/null +++ b/packages/language-coffee-script/CONTRIBUTING.md @@ -0,0 +1 @@ +See the [Atom contributing guide](https://github.com/atom/atom/blob/master/CONTRIBUTING.md) diff --git a/packages/language-coffee-script/ISSUE_TEMPLATE.md b/packages/language-coffee-script/ISSUE_TEMPLATE.md new file mode 100644 index 000000000..b60bb86c9 --- /dev/null +++ b/packages/language-coffee-script/ISSUE_TEMPLATE.md @@ -0,0 +1,40 @@ + + +### Prerequisites + +* [ ] Put an X between the brackets on this line if you have done all of the following: + * Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode + * Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/ + * Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq + * Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom + * Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages + +### Description + +[Description of the issue] + +### Steps to Reproduce + +1. [First Step] +2. [Second Step] +3. [and so on...] + +**Expected behavior:** [What you expect to happen] + +**Actual behavior:** [What actually happens] + +**Reproduces how often:** [What percentage of the time does it reproduce?] + +### Versions + +You can get this information from copy and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running. + +### Additional Information + +Any additional information, configuration or data that might be necessary to reproduce the issue. diff --git a/packages/language-coffee-script/LICENSE.md b/packages/language-coffee-script/LICENSE.md new file mode 100644 index 000000000..c9b97c569 --- /dev/null +++ b/packages/language-coffee-script/LICENSE.md @@ -0,0 +1,49 @@ +Copyright (c) 2014 GitHub Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +-------------------------------------------------------------------- + +This package was derived from a TextMate bundle located at +https://github.com/jashkenas/coffee-script-tmbundle and distributed under the +following license, located in `LICENSE`: + +Copyright (c) 2009-2014 Jeremy Ashkenas + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/packages/language-coffee-script/PULL_REQUEST_TEMPLATE.md b/packages/language-coffee-script/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000..cdaa94a86 --- /dev/null +++ b/packages/language-coffee-script/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,28 @@ +### Requirements + +* Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion. 
+* All new code requires tests to ensure against regressions + +### Description of the Change + + + +### Alternate Designs + + + +### Benefits + + + +### Possible Drawbacks + + + +### Applicable Issues + + diff --git a/packages/language-coffee-script/README.md b/packages/language-coffee-script/README.md new file mode 100644 index 000000000..9f255ec7e --- /dev/null +++ b/packages/language-coffee-script/README.md @@ -0,0 +1,9 @@ +# CoffeeScript language support in Atom +[![macOS Build Status](https://travis-ci.org/atom/language-coffee-script.svg?branch=master)](https://travis-ci.org/atom/language-coffee-script) +[![Windows Build status](https://ci.appveyor.com/api/projects/status/4j9aak7iwn2f2x7a/branch/master?svg=true)](https://ci.appveyor.com/project/Atom/language-coffee-script/branch/master) [![Dependency Status](https://david-dm.org/atom/language-coffee-script.svg)](https://david-dm.org/atom/language-coffee-script) + +Adds syntax highlighting and snippets to CoffeeScript files in Atom. + +Originally [converted](http://flight-manual.atom.io/hacking-atom/sections/converting-from-textmate) from the [CoffeeScript TextMate bundle](https://github.com/jashkenas/coffee-script-tmbundle). + +Contributions are greatly appreciated. Please fork this repository and open a pull request to add snippets, make grammar tweaks, etc. diff --git a/packages/language-coffee-script/coffeelint.json b/packages/language-coffee-script/coffeelint.json new file mode 100644 index 000000000..a5dd715e3 --- /dev/null +++ b/packages/language-coffee-script/coffeelint.json @@ -0,0 +1,37 @@ +{ + "max_line_length": { + "level": "ignore" + }, + "no_empty_param_list": { + "level": "error" + }, + "arrow_spacing": { + "level": "error" + }, + "no_interpolation_in_single_quotes": { + "level": "error" + }, + "no_debugger": { + "level": "error" + }, + "prefer_english_operator": { + "level": "error" + }, + "colon_assignment_spacing": { + "spacing": { + "left": 0, + "right": 1 + }, + "level": "error" + }, + "braces_spacing": { + "spaces": 0, + "level": "error" + }, + "spacing_after_comma": { + "level": "error" + }, + "no_stand_alone_at": { + "level": "error" + } +} diff --git a/packages/language-coffee-script/grammars/coffeescript (literate).cson b/packages/language-coffee-script/grammars/coffeescript (literate).cson new file mode 100644 index 000000000..5f13ce9d9 --- /dev/null +++ b/packages/language-coffee-script/grammars/coffeescript (literate).cson @@ -0,0 +1,724 @@ +'comment': 'CoffeeScript (Literate)' +'fileTypes': [ + 'litcoffee' + 'litcoffee.erb' + 'coffee.md' +] +'name': 'CoffeeScript (Literate)' +'scopeName': 'source.litcoffee' +'firstLineMatch': '''(?x) + # Hashbang + ^\\#!.*(?:\\s|\\/) + coffee(?:\\s.+?)?\\s(?:-l|--literate) + (?:\\s|$) + | + # Modeline + (?i: + # Emacs + -\\*-(?:\\s*(?=[^:;\\s]+\\s*-\\*-)|(?:.*?[;\\s]|(?<=-\\*-))mode\\s*:\\s*) + litcoffee + (?=[\\s;]|(?]?\\d+|m)?|\\sex)(?=:(?=\\s*set?\\s[^\\n:]+:)|:(?!\\s*set?\\s))(?:(?:\\s|\\s*:\\s*)\\w*(?:\\s*=(?:[^\\n\\\\\\s]|\\\\.)*)?)*[\\s:](?:filetype|ft|syntax)\\s*= + litcoffee + (?=\\s|:|$) + ) +''' +'patterns': [ + { + 'begin': '^(?=([ ]{4}|\\t)(?!$))' + 'end': '^(?!([ ]{4}|\\t))' + 'name': 'markup.raw.block.markdown' + 'patterns': [ + { + 'include': '#block_raw' + } + ] + } + { + 'begin': ''' + (?x)^ + (?= [ ]{0,3}>. 
+ | [#]{1,6}\\s*+ + | [ ]{0,3}(?[-*_])([ ]{0,2}\\k){2,}[ \\t]*+$ + ) + ''' + 'comment': 'We could also use an empty end match and set applyEndPatternLast, but then we must be sure that the begin pattern will only match stuff matched by the sub-patterns.' + 'end': ''' + (?x)^ + (?! [ ]{0,3}>. + | [#]{1,6}\\s*+ + | [ ]{0,3}(?[-*_])([ ]{0,2}\\k){2,}[ \\t]*+$ + ) + ''' + 'name': 'meta.block-level.markdown' + 'patterns': [ + { + 'include': '#block_quote' + } + { + 'include': '#heading' + } + { + 'include': '#separator' + } + ] + } + { + 'begin': '^[ ]{0,3}([*+-])(?=\\s)' + 'captures': + '1': + 'name': 'punctuation.definition.list_item.markdown' + 'end': '^(?=\\S|[ ]{4,})|(?!\\G)' + 'name': 'markup.list.unnumbered.markdown' + 'patterns': [ + { + 'include': '#list-paragraph' + } + ] + } + { + 'begin': '^[ ]{0,3}([0-9]+\\.)(?=\\s)' + 'captures': + '1': + 'name': 'punctuation.definition.list_item.markdown' + 'end': '^(?=\\S|[ ]{4,})|(?!\\G)' + 'name': 'markup.list.numbered.markdown' + 'patterns': [ + { + 'include': '#list-paragraph' + } + ] + } + { + 'begin': '^(?=<(p|div|h[1-6]|blockquote|pre|table|dl|ol|ul|script|noscript|form|fieldset|iframe|math|ins|del)\\b)(?!.*?)' + 'comment': 'Markdown formatting is disabled inside block-level tags.' + 'end': '(?<=^$\\n)' + 'name': 'meta.disable-markdown' + 'patterns': [ + { + 'include': 'text.html.basic' + } + ] + } + { + 'begin': '^(?=<(p|div|h[1-6]|blockquote|pre|table|dl|ol|ul|script|noscript|form|fieldset|iframe|math|ins|del)\\b)' + 'comment': 'Same rule but for one line disables.' + 'end': '$\\n?' + 'name': 'meta.disable-markdown' + 'patterns': [ + { + 'include': 'text.html.basic' + } + ] + } + { + 'captures': + '1': + 'name': 'punctuation.definition.constant.markdown' + '2': + 'name': 'constant.other.reference.link.markdown' + '3': + 'name': 'punctuation.definition.constant.markdown' + '4': + 'name': 'punctuation.separator.key-value.markdown' + '5': + 'name': 'punctuation.definition.link.markdown' + '6': + 'name': 'markup.underline.link.markdown' + '7': + 'name': 'punctuation.definition.link.markdown' + '8': + 'name': 'string.other.link.description.title.markdown' + '9': + 'name': 'punctuation.definition.string.begin.markdown' + '10': + 'name': 'punctuation.definition.string.end.markdown' + '11': + 'name': 'string.other.link.description.title.markdown' + '12': + 'name': 'punctuation.definition.string.begin.markdown' + '13': + 'name': 'punctuation.definition.string.end.markdown' + 'match': ''' + (?x) + \\s* # Leading whitespace + (\\[)(.+?)(\\])(:) # Reference name + [ \\t]* # Optional whitespace + (?) # The url + [ \\t]* # Optional whitespace + (?: + ((\\().+?(\\))) # Match title in quotes… + | ((").+?(")) # or in parens. + )? # Title is optional + \\s* # Optional whitespace + $ + ''' + 'name': 'meta.link.reference.def.markdown' + } + { + 'begin': '^(?=\\S)(?![=-]{3,}(?=$))' + 'end': '^(?:\\s*$|(?=[ ]{0,3}>.))|(?=[ \\t]*\\n)(?<=^===|^====|=====|^---|^----|-----)[ \\t]*\\n|(?=^#)' + 'name': 'meta.paragraph.markdown' + 'patterns': [ + { + 'include': '#inline' + } + { + 'include': 'text.html.basic' + } + { + 'captures': + '1': + 'name': 'punctuation.definition.heading.markdown' + 'match': '^(={3,})(?=[ \\t]*$)' + 'name': 'markup.heading.1.markdown' + } + { + 'captures': + '1': + 'name': 'punctuation.definition.heading.markdown' + 'match': '^(-{3,})(?=[ \\t]*$)' + 'name': 'markup.heading.2.markdown' + } + ] + } +] +'repository': + 'ampersand': + 'comment': 'Markdown will convert this for us. 
We match it so that the HTML grammar will not mark it up as invalid.' + 'match': '&(?!([a-zA-Z0-9]+|#[0-9]+|#x[0-9a-fA-F]+);)' + 'name': 'meta.other.valid-ampersand.markdown' + 'block_quote': + 'begin': '\\G[ ]{0,3}(>)(?!$)[ ]?' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.blockquote.markdown' + 'comment': ' We terminate the block quote when seeing an empty line, a separator or a line with leading > characters. The latter is to “reset” the quote level for quoted lines.' + 'end': ''' + (?x)^ + (?= \\s*$ + | [ ]{0,3}(?[-*_])([ ]{0,2}\\k){2,}[ \\t]*+$ + | [ ]{0,3}>. + ) + ''' + 'name': 'markup.quote.markdown' + 'patterns': [ + { + 'begin': ''' + (?x)\\G + (?= [ ]{0,3}>. + ) + ''' + 'end': '^' + 'patterns': [ + { + 'include': '#block_quote' + } + ] + } + { + 'applyEndPatternLast': 1 + 'begin': ''' + (?x)\\G + (?= ([ ]{4}|\\t) + | [#]{1,6}\\s*+ + | [ ]{0,3}(?[-*_])([ ]{0,2}\\k){2,}[ \\t]*+$ + ) + ''' + 'end': '^' + 'patterns': [ + { + 'include': '#block_raw' + } + { + 'include': '#heading' + } + { + 'include': '#separator' + } + ] + } + { + 'begin': ''' + (?x)\\G + (?! $ + | [ ]{0,3}>. + | ([ ]{4}|\\t) + | [#]{1,6}\\s*+ + | [ ]{0,3}(?[-*_])([ ]{0,2}\\k){2,}[ \\t]*+$ + ) + ''' + 'end': '$|(?<=\\n)' + 'patterns': [ + { + 'include': '#inline' + } + ] + } + ] + 'block_raw': + 'name': 'markup.raw.block.markdown' + 'patterns': [ + { + 'include': '#coffee_script' + } + ] + 'bold': + 'begin': ''' + (?x) + (\\*\\*|__)(?=\\S) # Open + (?= + ( + <[^>]*+> # HTML tags + | (?`+)([^`]|(?!(?(?!`))`)*+\\k # Raw + | \\\\[\\\\`*_{}\\[\\]()#.!+\\->]?+ # Escapes + | \\[ + ( + (? # Named group + [^\\[\\]\\\\] # Match most chars + | \\\\. # Escaped chars + | \\[ \\g*+ \\] # Nested brackets + )*+ + \\] + ( + ( # Reference Link + [ ]? # Optional space + \\[[^\\]]*+\\] # Ref name + ) + | + ( # Inline Link + \\( # Opening paren + [ \\t]*+ # Optional whitespace + ? # URL + [ \\t]*+ # Optional whitespace + ( # Optional Title + (?[\'"]) + (.*?) + \\k<title> + )? + \\) + ) + ) + ) + | (?!(?<=\\S)\\1). # Everything besides + )++ + (?<=\\S)\\1 # Close + ) + ''' + 'captures': + '1': + 'name': 'punctuation.definition.bold.markdown' + 'end': '(?<=\\S)(\\1)' + 'name': 'markup.bold.markdown' + 'patterns': [ + { + 'applyEndPatternLast': 1 + 'begin': '(?=<[^>]*?>)' + 'end': '(?<=>)' + 'patterns': [ + { + 'include': 'text.html.basic' + } + ] + } + { + 'include': '#escape' + } + { + 'include': '#ampersand' + } + { + 'include': '#bracket' + } + { + 'include': '#raw' + } + { + 'include': '#italic' + } + { + 'include': '#image-inline' + } + { + 'include': '#link-inline' + } + { + 'include': '#link-inet' + } + { + 'include': '#link-email' + } + { + 'include': '#image-ref' + } + { + 'include': '#link-ref-literal' + } + { + 'include': '#link-ref' + } + ] + 'bracket': + 'comment': 'Markdown will convert this for us. We match it so that the HTML grammar will not mark it up as invalid.' + 'match': '<(?![a-z/?\\$!])' + 'name': 'meta.other.valid-bracket.markdown' + 'coffee_script': + 'patterns': [ + { + 'include': 'source.coffee' + } + ] + 'escape': + 'match': '\\\\[-`*_#+.!(){}\\[\\]\\\\>]' + 'name': 'constant.character.escape.markdown' + 'heading': + 'begin': '\\G(#{1,6})(?!#)\\s*(?=\\S)' + 'captures': + '1': + 'name': 'punctuation.definition.heading.markdown' + 'contentName': 'entity.name.section.markdown' + 'end': '\\s*(#*)$\\n?' 
+ 'name': 'markup.heading.markdown' + 'patterns': [ + { + 'include': '#inline' + } + ] + 'image-inline': + 'captures': + '1': + 'name': 'punctuation.definition.string.begin.markdown' + '2': + 'name': 'string.other.link.description.markdown' + '3': + 'name': 'punctuation.definition.string.end.markdown' + '5': + 'name': 'invalid.illegal.whitespace.markdown' + '6': + 'name': 'punctuation.definition.metadata.markdown' + '7': + 'name': 'punctuation.definition.link.markdown' + '8': + 'name': 'markup.underline.link.image.markdown' + '9': + 'name': 'punctuation.definition.link.markdown' + '10': + 'name': 'string.other.link.description.title.markdown' + '11': + 'name': 'punctuation.definition.string.markdown' + '12': + 'name': 'punctuation.definition.string.markdown' + '13': + 'name': 'string.other.link.description.title.markdown' + '14': + 'name': 'punctuation.definition.string.markdown' + '15': + 'name': 'punctuation.definition.string.markdown' + '16': + 'name': 'punctuation.definition.metadata.markdown' + 'match': ''' + (?x) + \\! # Images start with ! + (\\[)((?<square>[^\\[\\]\\\\]|\\\\.|\\[\\g<square>*+\\])*+)(\\]) # Match the link text + ([ ])? # Space not allowed + (\\() # Opening paren for url + (<?)(\\S+?)(>?) # The url + [ \\t]* # Optional whitespace + (?: + ((\\().+?(\\))) # Match title in parens… + | ((").+?(")) # or in quotes. + )? # Title is optional + \\s* # Optional whitespace + (\\)) + ''' + 'name': 'meta.image.inline.markdown' + 'image-ref': + 'captures': + '1': + 'name': 'punctuation.definition.string.begin.markdown' + '2': + 'name': 'string.other.link.description.markdown' + '4': + 'name': 'punctuation.definition.string.begin.markdown' + '5': + 'name': 'punctuation.definition.constant.markdown' + '6': + 'name': 'constant.other.reference.link.markdown' + '7': + 'name': 'punctuation.definition.constant.markdown' + 'match': '\\!(\\[)((?<square>[^\\[\\]\\\\]|\\\\.|\\[\\g<square>*+\\])*+)(\\])[ ]?(\\[)(.*?)(\\])' + 'name': 'meta.image.reference.markdown' + 'inline': + 'patterns': [ + { + 'include': '#escape' + } + { + 'include': '#ampersand' + } + { + 'include': '#bracket' + } + { + 'include': '#raw' + } + { + 'include': '#bold' + } + { + 'include': '#italic' + } + { + 'include': '#line-break' + } + { + 'include': '#image-inline' + } + { + 'include': '#link-inline' + } + { + 'include': '#link-inet' + } + { + 'include': '#link-email' + } + { + 'include': '#image-ref' + } + { + 'include': '#link-ref-literal' + } + { + 'include': '#link-ref' + } + ] + 'italic': + 'begin': ''' + (?x) + (\\*|_)(?=\\S) # Open + (?= + ( + <[^>]*+> # HTML tags + | (?<raw>`+)([^`]|(?!(?<!`)\\k<raw>(?!`))`)*+\\k<raw> # Raw + | \\\\[\\\\`*_{}\\[\\]()#.!+\\->]?+ # Escapes + | \\[ + ( + (?<square> # Named group + [^\\[\\]\\\\] # Match most chars + | \\\\. # Escaped chars + | \\[ \\g<square>*+ \\] # Nested brackets + )*+ + \\] + ( + ( # Reference Link + [ ]? # Optional space + \\[[^\\]]*+\\] # Ref name + ) + | + ( # Inline Link + \\( # Opening paren + [ \\t]*+ # Optional whitespace + <?(.*?)>? # URL + [ \\t]*+ # Optional whitespace + ( # Optional Title + (?<title>[\'"]) + (.*?) + \\k<title> + )? + \\) + ) + ) + ) + | \\1\\1 # Must be bold closer + | (?!(?<=\\S)\\1). 
# Everything besides + )++ + (?<=\\S)\\1 # Close + ) + ''' + 'captures': + '1': + 'name': 'punctuation.definition.italic.markdown' + 'end': '(?<=\\S)(\\1)((?!\\1)|(?=\\1\\1))' + 'name': 'markup.italic.markdown' + 'patterns': [ + { + 'applyEndPatternLast': 1 + 'begin': '(?=<[^>]*?>)' + 'end': '(?<=>)' + 'patterns': [ + { + 'include': 'text.html.basic' + } + ] + } + { + 'include': '#escape' + } + { + 'include': '#ampersand' + } + { + 'include': '#bracket' + } + { + 'include': '#raw' + } + { + 'include': '#bold' + } + { + 'include': '#image-inline' + } + { + 'include': '#link-inline' + } + { + 'include': '#link-inet' + } + { + 'include': '#link-email' + } + { + 'include': '#image-ref' + } + { + 'include': '#link-ref-literal' + } + { + 'include': '#link-ref' + } + ] + 'line-break': + 'match': ' {2,}$' + 'name': 'meta.dummy.line-break' + 'link-email': + 'captures': + '1': + 'name': 'punctuation.definition.link.markdown' + '2': + 'name': 'markup.underline.link.markdown' + '4': + 'name': 'punctuation.definition.link.markdown' + 'match': '(<)((?:mailto:)?[-.\\w]+@[-a-z0-9]+(\\.[-a-z0-9]+)*\\.[a-z]+)(>)' + 'name': 'meta.link.email.lt-gt.markdown' + 'link-inet': + 'captures': + '1': + 'name': 'punctuation.definition.link.markdown' + '2': + 'name': 'markup.underline.link.markdown' + '3': + 'name': 'punctuation.definition.link.markdown' + 'match': '(<)((?:https?|ftp)://.*?)(>)' + 'name': 'meta.link.inet.markdown' + 'link-inline': + 'captures': + '1': + 'name': 'punctuation.definition.string.begin.markdown' + '2': + 'name': 'string.other.link.title.markdown' + '4': + 'name': 'punctuation.definition.string.end.markdown' + '5': + 'name': 'invalid.illegal.whitespace.markdown' + '6': + 'name': 'punctuation.definition.metadata.markdown' + '7': + 'name': 'punctuation.definition.link.markdown' + '8': + 'name': 'markup.underline.link.markdown' + '9': + 'name': 'punctuation.definition.link.markdown' + '10': + 'name': 'string.other.link.description.title.markdown' + '11': + 'name': 'punctuation.definition.string.begin.markdown' + '12': + 'name': 'punctuation.definition.string.end.markdown' + '13': + 'name': 'string.other.link.description.title.markdown' + '14': + 'name': 'punctuation.definition.string.begin.markdown' + '15': + 'name': 'punctuation.definition.string.end.markdown' + '16': + 'name': 'punctuation.definition.metadata.markdown' + 'match': ''' + (?x) + (\\[)((?<square>[^\\[\\]\\\\]|\\\\.|\\[\\g<square>*+\\])*+)(\\]) # Match the link text. + ([ ])? # Space not allowed + (\\() # Opening paren for url + (<?)(.*?)(>?) # The url + [ \\t]* # Optional whitespace + (?: + ((\\().+?(\\))) # Match title in parens… + | ((").+?(")) # or in quotes. + )? 
# Title is optional + \\s* # Optional whitespace + (\\)) + ''' + 'name': 'meta.link.inline.markdown' + 'link-ref': + 'captures': + '1': + 'name': 'punctuation.definition.string.begin.markdown' + '2': + 'name': 'string.other.link.title.markdown' + '4': + 'name': 'punctuation.definition.string.end.markdown' + '5': + 'name': 'punctuation.definition.constant.begin.markdown' + '6': + 'name': 'constant.other.reference.link.markdown' + '7': + 'name': 'punctuation.definition.constant.end.markdown' + 'match': '(\\[)((?<square>[^\\[\\]\\\\]|\\\\.|\\[\\g<square>*+\\])*+)(\\])[ ]?(\\[)([^\\]]*+)(\\])' + 'name': 'meta.link.reference.markdown' + 'link-ref-literal': + 'captures': + '1': + 'name': 'punctuation.definition.string.begin.markdown' + '2': + 'name': 'string.other.link.title.markdown' + '4': + 'name': 'punctuation.definition.string.end.markdown' + '5': + 'name': 'punctuation.definition.constant.begin.markdown' + '6': + 'name': 'punctuation.definition.constant.end.markdown' + 'match': '(\\[)((?<square>[^\\[\\]\\\\]|\\\\.|\\[\\g<square>*+\\])*+)(\\])[ ]?(\\[)(\\])' + 'name': 'meta.link.reference.literal.markdown' + 'list-paragraph': + 'patterns': [ + { + 'begin': '\\G\\s+(?=\\S)' + 'end': '^\\s*$' + 'name': 'meta.paragraph.list.markdown' + 'patterns': [ + { + 'include': '#inline' + } + { + 'captures': + '1': + 'name': 'punctuation.definition.list_item.markdown' + 'comment': 'Match the list punctuation' + 'match': '^\\s*([*+-]|[0-9]+\\.)' + } + ] + } + ] + 'raw': + 'captures': + '1': + 'name': 'punctuation.definition.raw.markdown' + '3': + 'name': 'punctuation.definition.raw.markdown' + 'match': '(`+)([^`]|(?!(?<!`)\\1(?!`))`)*+(\\1)' + 'name': 'markup.raw.inline.markdown' + 'separator': + 'match': '\\G[ ]{0,3}([-*_])([ ]{0,2}\\1){2,}[ \\t]*$\\n?' + 'name': 'meta.separator.markdown' diff --git a/packages/language-coffee-script/grammars/coffeescript.cson b/packages/language-coffee-script/grammars/coffeescript.cson new file mode 100644 index 000000000..aba77dce0 --- /dev/null +++ b/packages/language-coffee-script/grammars/coffeescript.cson @@ -0,0 +1,1235 @@ +'scopeName': 'source.coffee' +'name': 'CoffeeScript' +'fileTypes': [ + 'coffee' + 'Cakefile' + 'coffee.erb' + 'cson' + '_coffee' + 'cjsx' +] +'firstLineMatch': '''(?x) + # Hashbang + ^\\#!.*(?:\\s|\\/) + coffee + (?:$|\\s) + | + # Modeline + (?i: + # Emacs + -\\*-(?:\\s*(?=[^:;\\s]+\\s*-\\*-)|(?:.*?[;\\s]|(?<=-\\*-))mode\\s*:\\s*) + coffee + (?=[\\s;]|(?<![-*])-\\*-).*?-\\*- + | + # Vim + (?:(?:\\s|^)vi(?:m[<=>]?\\d+|m)?|\\sex)(?=:(?=\\s*set?\\s[^\\n:]+:)|:(?!\\s*set?\\s))(?:(?:\\s|\\s*:\\s*)\\w*(?:\\s*=(?:[^\\n\\\\\\s]|\\\\.)*)?)*[\\s:](?:filetype|ft|syntax)\\s*= + coffee + (?=\\s|:|$) + ) +''' +'patterns': [ + { + 'include': '#jsx' + } + { + 'match': '(new)\\s+(?:(?:(class)\\s+(\\w+(?:\\.\\w*)*)?)|(\\w+(?:\\.\\w*)*))' + 'name': 'meta.class.instance.constructor.coffee' + 'captures': + '1': + 'name': 'keyword.operator.new.coffee' + '2': + 'name': 'storage.type.class.coffee' + '3': + 'name': 'entity.name.type.instance.coffee' + '4': + 'name': 'entity.name.type.instance.coffee' + } + { + 'begin': '\'\'\'' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.coffee' + 'end': '\'\'\'' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.coffee' + 'name': 'string.quoted.single.heredoc.coffee' + 'patterns': [ + { + 'captures': + '1': + 'name': 'punctuation.definition.escape.backslash.coffee' + 'match': '(\\\\).' 
+ 'name': 'constant.character.escape.backslash.coffee' + } + ] + } + { + 'begin': '"""' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.coffee' + 'end': '"""' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.coffee' + 'name': 'string.quoted.double.heredoc.coffee' + 'patterns': [ + { + 'captures': + '1': + 'name': 'punctuation.definition.escape.backslash.coffee' + 'match': '(\\\\).' + 'name': 'constant.character.escape.backslash.coffee' + } + { + 'include': '#interpolated_coffee' + } + ] + } + { + 'match': '(`)(.*)(`)' + 'name': 'string.quoted.script.coffee' + 'captures': + '1': + 'name': 'punctuation.definition.string.begin.coffee' + '2': + 'name': 'source.js.embedded.coffee' + 'patterns': [ + { + 'include': 'source.js' + } + ] + '3': + 'name': 'punctuation.definition.string.end.coffee' + } + { + 'begin': '(?<!#)###(?!#)' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.comment.coffee' + 'end': '###' + 'endCaptures': + '0': + 'name': 'punctuation.definition.comment.coffee' + 'name': 'comment.block.coffee' + 'patterns': [ + { + 'match': '(?<=^|\\s)@\\w*(?=\\s)' + 'name': 'storage.type.annotation.coffee' + } + ] + } + { + 'begin': '#' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.comment.coffee' + 'end': '$' + 'name': 'comment.line.number-sign.coffee' + } + { + 'begin': '///' + 'end': '(///)[gimuy]*' + 'name': 'string.regexp.multiline.coffee' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.coffee' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.coffee' + 'patterns': [ + { + 'include': '#heregexp' + } + ] + } + { + 'begin': '(?<![\\w$])(/)(?=(?![/*+?])(.+)(/)[gimuy]*(?!\\s*[\\w$/(]))' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.string.begin.coffee' + 'end': '(/)[gimuy]*(?!\\s*[\\w$/(])' + 'endCaptures': + '1': + 'name': 'punctuation.definition.string.end.coffee' + 'name': 'string.regexp.coffee' + 'patterns': [ + { + 'include': 'source.js.regexp' + } + ] + } + { + 'match': '\\b(?<![\\.\\$])(break|by|catch|continue|else|finally|for|in|of|if|return|switch|then|throw|try|unless|when|while|until|loop|do|export|import|default|from|as|yield|async|await|(?<=for)\\s+own)(?!\\s*:)\\b' + 'name': 'keyword.control.coffee' + } + { + 'match': '\\b(?<![\\.\\$])(delete|instanceof|new|typeof)(?!\\s*:)\\b' + 'name': 'keyword.operator.$1.coffee' + } + { + 'match': '\\b(?<![\\.\\$])(case|function|var|void|with|const|let|enum|native|__hasProp|__extends|__slice|__bind|__indexOf|implements|interface|package|private|protected|public|static)(?!\\s*:)\\b' + 'name': 'keyword.reserved.coffee' + } + { + # a: -> ... + # a: (args) -> ... + 'begin': '''(?x) + (?<=\\s|^)((@)?[a-zA-Z_$][\\w$]*) + \\s*([:=])\\s* + (?=(\\([^\\(\\)]*\\)\\s*)?[=-]>) + ''' + 'beginCaptures': + '1': + 'name': 'entity.name.function.coffee' + '2': + 'name': 'variable.other.readwrite.instance.coffee' + '3': + 'name': 'keyword.operator.assignment.coffee' + 'end': '[=-]>' + 'endCaptures': + '0': + 'name': 'storage.type.function.coffee' + 'name': 'meta.function.coffee' + 'patterns': [ + { + 'include': '#function_params' + } + ] + } + { + # "a": -> ... + # "a": (args) -> ... 
+ 'begin': '''(?x) + (?<=\\s|^)(?:((\')([^\']*?)(\'))|((")([^"]*?)("))) + \\s*([:=])\\s* + (?=(\\([^\\(\\)]*\\)\\s*)?[=-]>) + ''' + 'beginCaptures': + '1': + 'name': 'string.quoted.single.coffee' + '2': + 'name': 'punctuation.definition.string.begin.coffee' + '3': + 'name': 'entity.name.function.coffee' + '4': + 'name': 'punctuation.definition.string.end.coffee' + '5': + 'name': 'string.quoted.double.coffee' + '6': + 'name': 'punctuation.definition.string.begin.coffee' + '7': + 'name': 'entity.name.function.coffee' + '8': + 'name': 'punctuation.definition.string.end.coffee' + '9': + 'name': 'keyword.operator.assignment.coffee' + 'end': '[=-]>' + 'endCaptures': + '0': + 'name': 'storage.type.function.coffee' + 'name': 'meta.function.coffee' + 'patterns': [ + { + 'include': '#function_params' + } + ] + } + { + # (args) -> ... + # -> ... + 'begin': '(?=(\\([^\\(\\)]*\\)\\s*)?[=-]>)' + 'end': '[=-]>' + 'endCaptures': + '0': + 'name': 'storage.type.function.coffee' + 'name': 'meta.function.inline.coffee' + 'patterns': [ + { + 'include': '#function_params' + } + ] + } + { + 'begin': '(?<=\\s|^)({)(?=[^\'"#]+?}[\\s\\]}]*=)' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.destructuring.begin.bracket.curly.coffee' + 'end': '}' + 'endCaptures': + '0': + 'name': 'punctuation.definition.destructuring.end.bracket.curly.coffee' + 'name': 'meta.variable.assignment.destructured.object.coffee' + 'patterns': [ + { + 'include': '$self' + } + { + 'match': '[a-zA-Z$_]\\w*' + 'name': 'variable.assignment.coffee' + } + ] + } + { + 'begin': '(?<=\\s|^)(\\[)(?=[^\'"#]+?\\][\\s\\]}]*=)' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.destructuring.begin.bracket.square.coffee' + 'end': '\\]' + 'endCaptures': + '0': + 'name': 'punctuation.definition.destructuring.end.bracket.square.coffee' + 'name': 'meta.variable.assignment.destructured.array.coffee' + 'patterns': [ + { + 'include': '$self' + } + { + 'match': '[a-zA-Z$_]\\w*' + 'name': 'variable.assignment.coffee' + } + ] + } + { + 'match': '\\b(?<!\\.|::)(true|on|yes)(?!\\s*[:=][^=])\\b' + 'name': 'constant.language.boolean.true.coffee' + } + { + 'match': '\\b(?<!\\.|::)(false|off|no)(?!\\s*[:=][^=])\\b' + 'name': 'constant.language.boolean.false.coffee' + } + { + 'match': '\\b(?<!\\.|::)null(?!\\s*[:=][^=])\\b' + 'name': 'constant.language.null.coffee' + } + { + 'match': '\\b(?<!\\.|::)extends(?!\\s*[:=])\\b' + 'name': 'variable.language.coffee' + } + { + 'match': '(?<!\\.)\\b(?<!\\$)(super|this|arguments)(?!\\s*[:=][^=]|\\$)\\b' + 'name': 'variable.language.$1.coffee' + } + { + 'captures': + '1': + 'name': 'storage.type.class.coffee' + '2': + 'name': 'keyword.control.inheritance.coffee' + '3': + 'name': 'entity.other.inherited-class.coffee' + 'match': '(?<=\\s|^|\\[|\\()(class)\\s+(extends)\\s+(@?[a-zA-Z\\$\\._][\\w\\.]*)' + 'name': 'meta.class.coffee' + } + { + 'captures': + '1': + 'name': 'storage.type.class.coffee' + '2': + 'name': 'entity.name.type.class.coffee' + '3': + 'name': 'keyword.control.inheritance.coffee' + '4': + 'name': 'entity.other.inherited-class.coffee' + 'match': '(?<=\\s|^|\\[|\\()(class\\b)\\s+(@?[a-zA-Z\\$_][\\w\\.]*)?(?:\\s+(extends)\\s+(@?[a-zA-Z\\$\\._][\\w\\.]*))?' 
+ 'name': 'meta.class.coffee' + } + { + 'match': '\\b(debugger|\\\\)\\b' + 'name': 'keyword.other.coffee' + } + { + 'match': '\\b(Array|ArrayBuffer|Blob|Boolean|Date|document|Function|Int(8|16|32|64)Array|Math|Map|Number|Object|Proxy|RegExp|Set|String|WeakMap|window|Uint(8|16|32|64)Array|XMLHttpRequest)\\b' + 'name': 'support.class.coffee' + } + { + 'match': '\\b(console)\\b' + 'name': 'entity.name.type.object.coffee' + } + { + 'match': '((?<=console\\.)(debug|warn|info|log|error|time|timeEnd|assert))\\b' + 'name': 'support.function.console.coffee' + } + { + 'match': '((?<=\\.)(apply|call|concat|every|filter|forEach|from|hasOwnProperty|indexOf|isPrototypeOf|join|lastIndexOf|map|of|pop|propertyIsEnumerable|push|reduce(Right)?|reverse|shift|slice|some|sort|splice|to(Locale)?String|unshift|valueOf))\\b' + 'name': 'support.function.method.array.coffee' + } + { + 'match': '((?<=Array\\.)(isArray))\\b' + 'name': 'support.function.static.array.coffee' + } + { + 'match': '((?<=Object\\.)(create|definePropert(ies|y)|freeze|getOwnProperty(Descriptors?|Names)|getProperty(Descriptor|Names)|getPrototypeOf|is(Extensible|Frozen|Sealed)?|isnt|keys|preventExtensions|seal))\\b' + 'name': 'support.function.static.object.coffee' + } + { + 'match': '((?<=Math\\.)(abs|acos|acosh|asin|asinh|atan|atan2|atanh|ceil|cos|cosh|exp|expm1|floor|hypot|log|log10|log1p|log2|max|min|pow|random|round|sign|sin|sinh|sqrt|tan|tanh|trunc))\\b' + 'name': 'support.function.static.math.coffee' + } + { + 'match': '((?<=Number\\.)(is(Finite|Integer|NaN)|toInteger))\\b' + 'name': 'support.function.static.number.coffee' + } + { + 'match': '(?<!\\.)\\b(module|exports|__filename|__dirname|global|process)(?!\\s*:)\\b' + 'name': 'support.variable.coffee' + } + { + 'match': '\\b(Infinity|NaN|undefined)\\b' + 'name': 'constant.language.coffee' + } + { + 'include': '#operators' + } + { + 'include': '#method_calls' + } + { + 'include': '#function_calls' + } + { + 'include': '#numbers' + } + { + 'include': '#objects' + } + { + 'include': '#properties' + } + { + 'match': '::' + 'name': 'keyword.operator.prototype.coffee' + } + { + 'match': '(?<!\\$)\\b[0-9]+[\\w$]*' + 'name': 'invalid.illegal.identifier.coffee' + } + { + 'match': ';' + 'name': 'punctuation.terminator.statement.coffee' + } + { + 'match': ',' + 'name': 'punctuation.separator.delimiter.coffee' + } + { + 'begin': '{' + 'beginCaptures': + '0': + 'name': 'meta.brace.curly.coffee' + 'end': '}' + 'endCaptures': + '0': + 'name': 'meta.brace.curly.coffee' + 'patterns': [ + { + 'include': '$self' + } + ] + } + { + 'begin': '\\[' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.array.begin.bracket.square.coffee' + 'end': '\\]' + 'endCaptures': + '0': + 'name': 'punctuation.definition.array.end.bracket.square.coffee' + 'patterns': [ + { + 'match': '(?<!\\.)\\.{3}' # ... + 'name': 'keyword.operator.slice.exclusive.coffee' + } + { + 'match': '(?<!\\.)\\.{2}' # .. 
+ 'name': 'keyword.operator.slice.inclusive.coffee' + } + { + 'include': '$self' + } + ] + } + { + 'begin': '\\(' + 'beginCaptures': + '0': + 'name': 'meta.brace.round.coffee' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'meta.brace.round.coffee' + 'patterns': [ + { + 'include': '$self' + } + ] + } + { + 'include': '#instance_variable' + } + { + 'include': '#single_quoted_string' + } + { + 'include': '#double_quoted_string' + } +] +'repository': + 'arguments': + 'patterns': [ + { + 'begin': '\\(' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.arguments.begin.bracket.round.coffee' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.definition.arguments.end.bracket.round.coffee' + 'name': 'meta.arguments.coffee' + 'patterns': [ + { + 'include': '$self' + } + ] + } + { + 'begin': '(?=(@|@?[\\w$]+|[=-]>|\\-\\d|\\[|{|\"|\'))' + 'end': '(?=\\s*(?<![\\w$])(of|in|then|is|isnt|and|or|for|else|when|if|unless|by|instanceof)(?![\\w$]))|(?=\\s*(}|\\]|\\)|#|$))' + 'name': 'meta.arguments.coffee' + 'patterns': [ + { + 'include': '$self' + } + ] + } + ] + 'double_quoted_string': + 'patterns': [ + { + 'begin': '"' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.coffee' + 'end': '"' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.coffee' + 'name': 'string.quoted.double.coffee' + 'patterns': [ + { + 'captures': + '1': + 'name': 'punctuation.definition.escape.backslash.coffee' + 'match': '(\\\\)(x[0-9A-Fa-f]{2}|[0-2][0-7]{0,2}|3[0-6][0-7]|37[0-7]?|[4-7][0-7]?|.)' + 'name': 'constant.character.escape.backslash.coffee' + } + { + 'include': '#interpolated_coffee' + } + ] + } + ] + 'function_calls': + 'patterns': [ + { + # functionCall(arg1, "arg2", [...]) + 'begin': '(@)?([\\w$]+)(?=\\()' + 'beginCaptures': + '1': + 'name': 'variable.other.readwrite.instance.coffee' + '2': + 'patterns': [ + { + 'include': '#function_names' + } + ] + 'end': '(?<=\\))' + 'name': 'meta.function-call.coffee' + 'patterns': [ + { + 'include': '#arguments' + } + ] + } + { + # functionCall arg1, "arg2", [...] + 'begin': '''(?x) + (@)?([\\w$]+) + \\s* + (?=\\s+(?!(?<![\\w$])(of|in|then|is|isnt|and|or|for|else|when|if|unless|by|instanceof)(?![\\w$]))(?=(@?[\\w$]+|[=-]>|\\-\\d|\\[|{|\"|\'))) + ''' + 'beginCaptures': + '1': + 'name': 'variable.other.readwrite.instance.coffee' + '2': + 'patterns': [ + { + 'include': '#function_names' + } + ] + 'end': '(?=\\s*(?<![\\w$])(of|in|then|is|isnt|and|or|for|else|when|if|unless|by|instanceof)(?![\\w$]))|(?=\\s*(}|\\]|\\)|#|$))' + 'name': 'meta.function-call.coffee' + 'patterns': [ + { + 'include': '#arguments' + } + ] + } + ] + 'function_names': + 'patterns': [ + { + 'match': '''(?x) + \\b(isNaN|isFinite|eval|uneval|parseInt|parseFloat|decodeURI| + decodeURIComponent|encodeURI|encodeURIComponent|escape|unescape| + require|set(Interval|Timeout)|clear(Interval|Timeout))\\b + ''' + 'name': 'support.function.coffee' + } + { + 'match': "[a-zA-Z_$][\\w$]*" + 'name': 'entity.name.function.coffee' + } + { + 'match': '\\d[\\w$]*' + 'name': 'invalid.illegal.identifier.coffee' + } + ] + 'function_params': + 'patterns': [ + { + 'begin': '\\(' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.parameters.begin.bracket.round.coffee' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.definition.parameters.end.bracket.round.coffee' + 'name': 'meta.parameters.coffee' + 'patterns': [ + { + 'match': '([a-zA-Z_$][\\w$]*)(\\.\\.\\.)?' 
+ 'captures': + '1': + 'name': 'variable.parameter.function.coffee' + '2': + 'name': 'keyword.operator.splat.coffee' + } + { + 'match': '(@(?:[a-zA-Z_$][\\w$]*)?)(\\.\\.\\.)?' + 'captures': + '1': + 'name': 'variable.parameter.function.readwrite.instance.coffee' + '2': + 'name': 'keyword.operator.splat.coffee' + } + { + 'include': '$self' + } + ] + } + ] + 'embedded_comment': + 'patterns': [ + { + 'captures': + '1': + 'name': 'punctuation.definition.comment.coffee' + 'match': '(?<!\\\\)(#).*$\\n?' + 'name': 'comment.line.number-sign.coffee' + } + ] + 'instance_variable': + 'patterns': [ + { + 'match': '(@)([a-zA-Z_\\$]\\w*)?' + 'name': 'variable.other.readwrite.instance.coffee' + } + ] + 'interpolated_coffee': + 'patterns': [ + { + 'begin': '\\#\\{' + 'captures': + '0': + 'name': 'punctuation.section.embedded.coffee' + 'end': '\\}' + 'name': 'source.coffee.embedded.source' + 'patterns': [ + { + 'include': '$self' + } + ] + } + ] + 'method_calls': + 'patterns': [ + { + # .methodCall(arg1, "arg2", [...]) + 'begin': '(?:(\\.)|(::))\\s*([\\w$]+)\\s*(?=\\()' + 'beginCaptures': + '1': + 'name': 'punctuation.separator.method.period.coffee' + '2': + 'name': 'keyword.operator.prototype.coffee' + '3': + 'patterns': [ + { + 'include': '#method_names' + } + ] + 'end': '(?<=\\))' + 'name': 'meta.method-call.coffee' + 'patterns': [ + { + 'include': '#arguments' + } + ] + } + { + # .methodCall arg1, "arg2", [...] + 'begin': '(?:(\\.)|(::))\\s*([\\w$]+)\\s*(?=\\s+(?!(?<![\\w$])(of|in|then|is|isnt|and|or|for|else|when|if|unless|by|instanceof)(?![\\w$]))(?=(@|@?[\\w$]+|[=-]>|\\-\\d|\\[|{|\"|\')))' + 'beginCaptures': + '1': + 'name': 'punctuation.separator.method.period.coffee' + '2': + 'name': 'keyword.operator.prototype.coffee' + '3': + 'patterns': [ + { + 'include': '#method_names' + } + ] + 'end': '(?=\\s*(?<![\\w$])(of|in|then|is|isnt|and|or|for|else|when|if|unless|by|instanceof)(?![\\w$]))|(?=\\s*(}|\\]|\\)|#|$))' + 'name': 'meta.method-call.coffee' + 'patterns': [ + { + 'include': '#arguments' + } + ] + } + ] + 'method_names': + 'patterns': [ + { + 'match': '''(?x) + \\bon(Rowsinserted|Rowsdelete|Rowenter|Rowexit|Resize|Resizestart|Resizeend|Reset| + Readystatechange|Mouseout|Mouseover|Mousedown|Mouseup|Mousemove| + Before(cut|deactivate|unload|update|paste|print|editfocus|activate)| + Blur|Scrolltop|Submit|Select|Selectstart|Selectionchange|Hover|Help| + Change|Contextmenu|Controlselect|Cut|Cellchange|Clock|Close|Deactivate| + Datasetchanged|Datasetcomplete|Dataavailable|Drop|Drag|Dragstart|Dragover| + Dragdrop|Dragenter|Dragend|Dragleave|Dblclick|Unload|Paste|Propertychange|Error| + Errorupdate|Keydown|Keyup|Keypress|Focus|Load|Activate|Afterupdate|Afterprint|Abort)\\b + ''' + 'name': 'support.function.event-handler.coffee' + } + { + 'match': '''(?x) + \\b(shift|showModelessDialog|showModalDialog|showHelp|scroll|scrollX|scrollByPages| + scrollByLines|scrollY|scrollTo|stop|strike|sizeToContent|sidebar|signText|sort| + sup|sub|substr|substring|splice|split|send|set(Milliseconds|Seconds|Minutes|Hours| + Month|Year|FullYear|Date|UTC(Milliseconds|Seconds|Minutes|Hours|Month|FullYear|Date)| + Time|Hotkeys|Cursor|ZOptions|Active|Resizable|RequestHeader)|search|slice| + savePreferences|small|home|handleEvent|navigate|char|charCodeAt|charAt|concat| + contextual|confirm|compile|clear|captureEvents|call|createStyleSheet|createPopup| + createEventObject|to(GMTString|UTCString|String|Source|UpperCase|LowerCase|LocaleString)| + 
test|taint|taintEnabled|indexOf|italics|disableExternalCapture|dump|detachEvent|unshift| + untaint|unwatch|updateCommands|join|javaEnabled|pop|push|plugins.refresh|paddings|parse| + print|prompt|preference|enableExternalCapture|exec|execScript|valueOf|UTC|find|file| + fileModifiedDate|fileSize|fileCreatedDate|fileUpdatedDate|fixed|fontsize|fontcolor| + forward|fromCharCode|watch|link|load|lastIndexOf|anchor|attachEvent|atob|apply|alert| + abort|routeEvents|resize|resizeBy|resizeTo|recalc|returnValue|replace|reverse|reload| + releaseCapture|releaseEvents|go|get(Milliseconds|Seconds|Minutes|Hours|Month|Day|Year|FullYear| + Time|Date|TimezoneOffset|UTC(Milliseconds|Seconds|Minutes|Hours|Day|Month|FullYear|Date)| + Attention|Selection|ResponseHeader|AllResponseHeaders)|moveBy|moveBelow|moveTo| + moveToAbsolute|moveAbove|mergeAttributes|match|margins|btoa|big|bold|borderWidths|blink|back)\\b + ''' + 'name': 'support.function.coffee' + } + { + 'match': '''(?x) + \\b(acceptNode|add|addEventListener|addTextTrack|adoptNode|after|animate|append| + appendChild|appendData|before|blur|canPlayType|captureStream| + caretPositionFromPoint|caretRangeFromPoint|checkValidity|clear|click| + cloneContents|cloneNode|cloneRange|close|closest|collapse| + compareBoundaryPoints|compareDocumentPosition|comparePoint|contains| + convertPointFromNode|convertQuadFromNode|convertRectFromNode|createAttribute| + createAttributeNS|createCaption|createCDATASection|createComment| + createContextualFragment|createDocument|createDocumentFragment| + createDocumentType|createElement|createElementNS|createEntityReference| + createEvent|createExpression|createHTMLDocument|createNodeIterator| + createNSResolver|createProcessingInstruction|createRange|createShadowRoot| + createTBody|createTextNode|createTFoot|createTHead|createTreeWalker|delete| + deleteCaption|deleteCell|deleteContents|deleteData|deleteRow|deleteTFoot| + deleteTHead|detach|disconnect|dispatchEvent|elementFromPoint|elementsFromPoint| + enableStyleSheetsForSet|entries|evaluate|execCommand|exitFullscreen| + exitPointerLock|expand|extractContents|fastSeek|firstChild|focus|forEach|get| + getAll|getAnimations|getAttribute|getAttributeNames|getAttributeNode| + getAttributeNodeNS|getAttributeNS|getBoundingClientRect|getBoxQuads| + getClientRects|getContext|getDestinationInsertionPoints|getElementById| + getElementsByClassName|getElementsByName|getElementsByTagName| + getElementsByTagNameNS|getItem|getNamedItem|getSelection|getStartDate| + getVideoPlaybackQuality|has|hasAttribute|hasAttributeNS|hasAttributes| + hasChildNodes|hasFeature|hasFocus|importNode|initEvent|insertAdjacentElement| + insertAdjacentHTML|insertAdjacentText|insertBefore|insertCell|insertData| + insertNode|insertRow|intersectsNode|isDefaultNamespace|isEqualNode| + isPointInRange|isSameNode|item|key|keys|lastChild|load|lookupNamespaceURI| + lookupPrefix|matches|move|moveAttribute|moveAttributeNode|moveChild| + moveNamedItem|namedItem|nextNode|nextSibling|normalize|observe|open| + parentNode|pause|play|postMessage|prepend|preventDefault|previousNode| + previousSibling|probablySupportsContext|queryCommandEnabled| + queryCommandIndeterm|queryCommandState|queryCommandSupported|queryCommandValue| + querySelector|querySelectorAll|registerContentHandler|registerElement| + registerProtocolHandler|releaseCapture|releaseEvents|remove|removeAttribute| + removeAttributeNode|removeAttributeNS|removeChild|removeEventListener| + removeItem|replace|replaceChild|replaceData|replaceWith|reportValidity| + 
requestFullscreen|requestPointerLock|reset|scroll|scrollBy|scrollIntoView| + scrollTo|seekToNextFrame|select|selectNode|selectNodeContents|set|setAttribute| + setAttributeNode|setAttributeNodeNS|setAttributeNS|setCapture| + setCustomValidity|setEnd|setEndAfter|setEndBefore|setItem|setNamedItem| + setRangeText|setSelectionRange|setSinkId|setStart|setStartAfter|setStartBefore| + slice|splitText|stepDown|stepUp|stopImmediatePropagation|stopPropagation| + submit|substringData|supports|surroundContents|takeRecords|terminate|toBlob| + toDataURL|toggle|toString|values|write|writeln)\\b + ''' + 'name': 'support.function.dom.coffee' + } + { + 'match': "[a-zA-Z_$][\\w$]*" + 'name': 'entity.name.function.coffee' + } + { + 'match': '\\d[\\w$]*' + 'name': 'invalid.illegal.identifier.coffee' + } + ] + 'numbers': + 'patterns': [ + { + 'match': '\\b(?<!\\$)0(x|X)[0-9a-fA-F]+\\b(?!\\$)' + 'name': 'constant.numeric.hex.coffee' + } + { + 'match': '\\b(?<!\\$)0(b|B)[01]+\\b(?!\\$)' + 'name': 'constant.numeric.binary.coffee' + } + { + 'match': '\\b(?<!\\$)0(o|O)?[0-7]+\\b(?!\\$)' + 'name': 'constant.numeric.octal.coffee' + } + { + 'match': '''(?x) + (?<!\\$)(?: + (?:\\b[0-9]+(\\.)[0-9]+[eE][+-]?[0-9]+\\b)| # 1.1E+3 + (?:\\b[0-9]+(\\.)[eE][+-]?[0-9]+\\b)| # 1.E+3 + (?:\\B(\\.)[0-9]+[eE][+-]?[0-9]+\\b)| # .1E+3 + (?:\\b[0-9]+[eE][+-]?[0-9]+\\b)| # 1E+3 + (?:\\b[0-9]+(\\.)[0-9]+\\b)| # 1.1 + (?:\\b[0-9]+(?=\\.{2,3}))| # 1 followed by a slice + (?:\\b[0-9]+(\\.)\\B)| # 1. + (?:\\B(\\.)[0-9]+\\b)| # .1 + (?:\\b[0-9]+\\b(?!\\.)) # 1 + )(?!\\$) + ''' + 'captures': + '0': + 'name': 'constant.numeric.decimal.coffee' + '1': + 'name': 'punctuation.separator.decimal.period.coffee' + '2': + 'name': 'punctuation.separator.decimal.period.coffee' + '3': + 'name': 'punctuation.separator.decimal.period.coffee' + '4': + 'name': 'punctuation.separator.decimal.period.coffee' + '5': + 'name': 'punctuation.separator.decimal.period.coffee' + '6': + 'name': 'punctuation.separator.decimal.period.coffee' + } + ] + 'objects': + 'patterns': [ + { + # OBJ in OBJ.prop, OBJ.methodCall() + 'match': '[A-Z][A-Z0-9_$]*(?=\\s*\\??(\\.\\s*[a-zA-Z_$]\\w*|::))' + 'name': 'constant.other.object.coffee' + } + { + # obj in obj.prop, obj.methodCall() + 'match': '[a-zA-Z_$][\\w$]*(?=\\s*\\??(\\.\\s*[a-zA-Z_$]\\w*|::))' + 'name': 'variable.other.object.coffee' + } + ] + 'operators': + 'patterns': [ + { + 'match': '(?:([a-zA-Z$_][\\w$]*)?\\s+|(?<![\\w$]))(and=|or=)' + 'captures': + '1': + 'name': 'variable.assignment.coffee' + '2': + 'name': 'keyword.operator.assignment.compound.coffee' + } + { + 'match': '([a-zA-Z$_][\\w$]*)?\\s*(%=|\\+=|-=|\\*=|&&=|\\|\\|=|\\?=|(?<!\\()/=)' + 'captures': + '1': + 'name': 'variable.assignment.coffee' + '2': + 'name': 'keyword.operator.assignment.compound.coffee' + } + { + 'match': '([a-zA-Z$_][\\w$]*)?\\s*(&=|\\^=|<<=|>>=|>>>=|\\|=)' + 'captures': + '1': + 'name': 'variable.assignment.coffee' + '2': + 'name': 'keyword.operator.assignment.compound.bitwise.coffee' + } + { + 'match': '<<|>>>|>>' + 'name': 'keyword.operator.bitwise.shift.coffee' + } + { + 'match': '!=|<=|>=|==|<|>' + 'name': 'keyword.operator.comparison.coffee' + } + { + 'match': '&&|!|\\|\\|' + 'name': 'keyword.operator.logical.coffee' + } + { + 'match': '&|\\||\\^|~' + 'name': 'keyword.operator.bitwise.coffee' + } + { + 'match': '([a-zA-Z$_][\\w$]*)?\\s*(=|:(?!:))(?![>=])' + 'captures': + '1': + 'name': 'variable.assignment.coffee' + '2': + 'name': 'keyword.operator.assignment.coffee' + } + { + 'match': '--' + 'name': 'keyword.operator.decrement.coffee' + } 
+ { + 'match': '\\+\\+' + 'name': 'keyword.operator.increment.coffee' + } + { + 'match': '\\.\\.\\.' + 'name': 'keyword.operator.splat.coffee' + } + { + 'match': '\\?' + 'name': 'keyword.operator.existential.coffee' + } + { + 'match': '%|\\*|/|-|\\+' + 'name': 'keyword.operator.coffee' + } + { + 'match': '''(?x) + \\b(?<![\\.\\$]) + (?: + (and|or|not) # logical + | + (is|isnt) # comparison + ) + (?!\\s*:)\\b + ''' + 'captures': + '1': + 'name': 'keyword.operator.logical.coffee' + '2': + 'name': 'keyword.operator.comparison.coffee' + } + ] + 'properties': + 'patterns': [ + { + # PROP1 in obj.PROP1.prop2, func().PROP1.prop2 + 'match': '(?:(\\.)|(::))\\s*([A-Z][A-Z0-9_$]*\\b\\$*)(?=\\s*\\??(\\.\\s*[a-zA-Z_$]\\w*|::))' + 'captures': + '1': + 'name': 'punctuation.separator.property.period.coffee' + '2': + 'name': 'keyword.operator.prototype.coffee' + '3': + 'name': 'constant.other.object.property.coffee' + } + { + # prop1 in obj.prop1.prop2, func().prop1.prop2 + 'match': '(?:(\\.)|(::))\\s*(\\$*[a-zA-Z_$][\\w$]*)(?=\\s*\\??(\\.\\s*[a-zA-Z_$]\\w*|::))' + 'captures': + '1': + 'name': 'punctuation.separator.property.period.coffee' + '2': + 'name': 'keyword.operator.prototype.coffee' + '3': + 'name': 'variable.other.object.property.coffee' + } + { + # PROP in obj.PROP, func().PROP + 'match': '(?:(\\.)|(::))\\s*([A-Z][A-Z0-9_$]*\\b\\$*)' + 'captures': + '1': + 'name': 'punctuation.separator.property.period.coffee' + '2': + 'name': 'keyword.operator.prototype.coffee' + '3': + 'name': 'constant.other.property.coffee' + } + { + # prop in obj.prop, func().prop + 'match': '(?:(\\.)|(::))\\s*(\\$*[a-zA-Z_$][\\w$]*)' + 'captures': + '1': + 'name': 'punctuation.separator.property.period.coffee' + '2': + 'name': 'keyword.operator.prototype.coffee' + '3': + 'name': 'variable.other.property.coffee' + } + { + # 123illegal in obj.123illegal, func().123illegal + 'match': '(?:(\\.)|(::))\\s*([0-9][\\w$]*)' + 'captures': + '1': + 'name': 'punctuation.separator.property.period.coffee' + '2': + 'name': 'keyword.operator.prototype.coffee' + '3': + 'name': 'invalid.illegal.identifier.coffee' + } + ] + 'single_quoted_string': + 'patterns': [ + { + 'begin': '\'' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.coffee' + 'end': '\'' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.coffee' + 'name': 'string.quoted.single.coffee' + 'patterns': [ + { + 'captures': + '1': + 'name': 'punctuation.definition.escape.backslash.coffee' + 'match': '(\\\\)(x[0-9A-Fa-f]{2}|[0-2][0-7]{0,2}|3[0-6][0-7]?|37[0-7]?|[4-7][0-7]?|.)' + 'name': 'constant.character.escape.backslash.coffee' + } + ] + } + ] + 'regex-character-class': + 'patterns': [ + { + 'match': '\\\\[wWsSdD]|\\.' + 'name': 'constant.character.character-class.regexp' + } + { + 'match': '\\\\([0-7]{3}|x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4})' + 'name': 'constant.character.numeric.regexp' + } + { + 'match': '\\\\c[A-Z]' + 'name': 'constant.character.control.regexp' + } + { + 'match': '\\\\.' + 'name': 'constant.character.escape.backslash.regexp' + } + ] + 'heregexp': + 'patterns': [ + { + 'match': '\\\\[bB]|\\^|\\$' + 'name': 'keyword.control.anchor.regexp' + } + { + 'match': '\\\\[1-9]\\d*' + 'name': 'keyword.other.back-reference.regexp' + } + { + 'match': '[?+*]|\\{(\\d+,\\d+|\\d+,|,\\d+|\\d+)\\}\\??' 
+ 'name': 'keyword.operator.quantifier.regexp' + } + { + 'match': '\\|' + 'name': 'keyword.operator.or.regexp' + } + { + 'begin': '(\\()((\\?=)|(\\?!))' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.group.regexp' + '3': + 'name': 'meta.assertion.look-ahead.regexp' + '4': + 'name': 'meta.assertion.negative-look-ahead.regexp' + 'end': '(\\))' + 'endCaptures': + '1': + 'name': 'punctuation.definition.group.regexp' + 'name': 'meta.group.assertion.regexp' + 'patterns': [ + { + 'include': '#heregexp' + } + ] + } + { + 'begin': '\\((\\?:)?' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.group.regexp' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.definition.group.regexp' + 'name': 'meta.group.regexp' + 'patterns': [ + { + 'include': '#heregexp' + } + ] + } + { + 'begin': '(\\[)(\\^)?' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.character-class.regexp' + '2': + 'name': 'keyword.operator.negation.regexp' + 'end': '(\\])' + 'endCaptures': + '1': + 'name': 'punctuation.definition.character-class.regexp' + 'name': 'constant.other.character-class.set.regexp' + 'patterns': [ + { + 'captures': + '1': + 'name': 'constant.character.numeric.regexp' + '2': + 'name': 'constant.character.control.regexp' + '3': + 'name': 'constant.character.escape.backslash.regexp' + '4': + 'name': 'constant.character.numeric.regexp' + '5': + 'name': 'constant.character.control.regexp' + '6': + 'name': 'constant.character.escape.backslash.regexp' + 'match': '(?:.|(\\\\(?:[0-7]{3}|x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}))|(\\\\c[A-Z])|(\\\\.))\\-(?:[^\\]\\\\]|(\\\\(?:[0-7]{3}|x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}))|(\\\\c[A-Z])|(\\\\.))' + 'name': 'constant.other.character-class.range.regexp' + } + { + 'include': '#regex-character-class' + } + ] + } + { + 'include': '#regex-character-class' + } + { + 'include': '#interpolated_coffee' + } + { + 'include': '#embedded_comment' + } + ] + + 'jsx': + 'patterns': [ + { + 'include': '#jsx-tag' + } + { + 'include': '#jsx-end-tag' + } + ] + + 'jsx-expression': + 'begin': '{' + 'beginCaptures': + '0': + 'name': 'meta.brace.curly.coffee' + 'end': '}' + 'endCaptures': + '0': + 'name': 'meta.brace.curly.coffee' + + 'patterns': [ + { + 'include': '#double_quoted_string' + } + { + 'include': '$self' + } + ] + + 'jsx-attribute': + 'patterns': [ + { + 'captures': + '1': + 'name': 'entity.other.attribute-name.coffee' + '2': + 'name': 'keyword.operator.assignment.coffee' + 'match': '(?:^|\\s+)([-\\w.]+)\\s*(=)' + } + { + 'include': '#double_quoted_string' + } + { + 'include': '#single_quoted_string' + } + { + 'include': '#jsx-expression' + } + ] + + 'jsx-tag': + 'patterns': [ + { + 'begin': '(<)([-\\w\\.]+)' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.tag.coffee' + '2': + 'name': 'entity.name.tag.coffee' + 'end': '(/?>)' + 'name': 'meta.tag.coffee' + 'patterns': [ + 'include': '#jsx-attribute' + ] + } + ] + + 'jsx-end-tag': + 'patterns': [ + { + 'begin': '(</)([-\\w\\.]+)' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.tag.coffee' + '2': + 'name': 'entity.name.tag.coffee' + 'end': '(/?>)' + 'name': 'meta.tag.coffee' + } + ] diff --git a/packages/language-coffee-script/package-lock.json b/packages/language-coffee-script/package-lock.json new file mode 100644 index 000000000..d86793177 --- /dev/null +++ b/packages/language-coffee-script/package-lock.json @@ -0,0 +1,156 @@ +{ + "name": "language-coffee-script", + "version": "0.50.0", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "balanced-match": { + 
"version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "coffee-script": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.11.1.tgz", + "integrity": "sha1-vxxHrWREOg2V0S3ysUfMCk2q1uk=", + "dev": true + }, + "coffeelint": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/coffeelint/-/coffeelint-1.16.2.tgz", + "integrity": "sha512-6mzgOo4zb17WfdrSui/cSUEgQ0AQkW3gXDht+6lHkfkqGUtSYKwGdGcXsDfAyuScVzTlTtKdfwkAlJWfqul7zg==", + "dev": true, + "requires": { + "coffee-script": "~1.11.0", + "glob": "^7.0.6", + "ignore": "^3.0.9", + "optimist": "^0.6.1", + "resolve": "^0.6.3", + "strip-json-comments": "^1.0.2" + } + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "glob": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", + "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "ignore": { + "version": "3.3.10", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", + "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", + "dev": true + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", + "integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=", + "dev": true + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "optimist": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", + "dev": true, + "requires": { + "minimist": "~0.0.1", + "wordwrap": "~0.0.2" + } + }, + 
"path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "resolve": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-0.6.3.tgz", + "integrity": "sha1-3ZV5gufnNt699TtYpN2RdUV13UY=", + "dev": true + }, + "strip-json-comments": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-1.0.4.tgz", + "integrity": "sha1-HhX7ysl9Pumb8tc7TGVrCCu6+5E=", + "dev": true + }, + "wordwrap": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", + "dev": true + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + } + } +} diff --git a/packages/language-coffee-script/package.json b/packages/language-coffee-script/package.json new file mode 100644 index 000000000..f6a5e4b20 --- /dev/null +++ b/packages/language-coffee-script/package.json @@ -0,0 +1,21 @@ +{ + "version": "0.50.0", + "name": "language-coffee-script", + "description": "CoffeeScript language support in Atom", + "license": "MIT", + "engines": { + "atom": "*", + "node": "*" + }, + "homepage": "http://atom.github.io/language-coffee-script", + "repository": { + "type": "git", + "url": "https://github.com/atom/language-coffee-script.git" + }, + "bugs": { + "url": "https://github.com/atom/language-coffee-script/issues" + }, + "devDependencies": { + "coffeelint": "^1.10.1" + } +} diff --git a/packages/language-coffee-script/settings/language-coffee-script.cson b/packages/language-coffee-script/settings/language-coffee-script.cson new file mode 100644 index 000000000..9aeb77a27 --- /dev/null +++ b/packages/language-coffee-script/settings/language-coffee-script.cson @@ -0,0 +1,19 @@ +'.source.coffee, .source.litcoffee, .source.coffee.md': + 'editor': + 'commentStart': '# ' +'.source.coffee': + 'editor': + 'autoIndentOnPaste': false + 'increaseIndentPattern': '(?x) + ^\\s* + ( + .*\\b(?<!\\.)class(\\s|$) + | [a-zA-Z\\$_](\\w|\\$|:|\\.)*\\s*(?=\\:(\\s*\\(.*\\))?\\s*((=|-)>\\s*$)) + | [a-zA-Z\\$_](\\w|\\$|\\.)*\\s*(:|=)\\s*((if|while)(?!.*?then)|for|$) + | \\b(if|else|unless|while|when)\\b(?!.*?then)|\\b(for|loop)\\b + | \\b(try|finally|catch|((catch|switch)\\s+\\S.*))\\b\\s*$ + | .*[-=]>\\s*$ + | .*[\\{\\[]\\s*$ + | .*:\\s*$ + )' + 'decreaseIndentPattern': '^\\s*(\\}|\\]|\\b(else|catch|finally)\\b)$' diff --git a/packages/language-coffee-script/snippets/language-coffee-script.cson b/packages/language-coffee-script/snippets/language-coffee-script.cson new file mode 100644 index 000000000..a27cdeaa4 --- /dev/null +++ b/packages/language-coffee-script/snippets/language-coffee-script.cson @@ -0,0 +1,89 @@ +'.source.coffee': + 'Array Comprehension': + 'prefix': 'fora' + 'body': 'for ${1:name} in ${2:array}\n ${0:# body...}' + 'Function (bound)': + 'prefix': 'bfun' + 'body': '(${1:args}) =>\n ${0:# body...}' + 'Class': + 'prefix': 'cla' + 'body': 'class ${1:ClassName}${2: extends ${3:Ancestor}}\n\n ${4:constructor: (${5:args}) ->\n ${6:# body...}}\n $7' + 'Else if': + 'prefix': 'elif' + 'body': 'else if ${1:condition}\n ${0:# body...}' + 'Function': + 'prefix': 'fun' + 'body': '(${1:args}) ->\n ${0:# body...}\n\n' + 'If .. 
Else': + 'prefix': 'ife' + 'body': 'if ${1:condition}\n ${2:# body...}\nelse\n ${3:# body...}' + 'If': + 'prefix': 'if' + 'body': 'if ${1:condition}\n ${0:# body...}' + 'Object comprehension': + 'prefix': 'foro' + 'body': 'for ${1:key}, ${2:value} of ${3:Object}\n ${0:# body...}' + 'Range comprehension (exclusive)': + 'prefix': 'forrex' + 'body': 'for ${1:name} in [${2:start}...${3:finish}]${4: by ${5:step}}\n ${0:# body...}' + 'Range comprehension (inclusive)': + 'prefix': 'forr' + 'body': 'for ${1:name} in [${2:start}..${3:finish}]${4: by ${5:step}}\n ${0:# body...}' + 'Switch': + 'prefix': 'swi' + 'body': 'switch ${1:object}\n when ${2:value}\n ${0:# body...}' + 'Ternary If': + 'prefix': 'ifte' + 'body': 'if ${1:condition} then ${2:value} else ${3:other}' + 'Try .. Catch': + 'prefix': 'try' + 'body': 'try\n $1\ncatch ${2:error}\n $3' + 'Unless': + 'prefix': 'unl' + 'body': '${1:action} unless ${2:condition}' + 'Subheader': + 'prefix': '/3' + 'body': '# $1\n# -------------------------\n$0' + 'log': + 'prefix': 'log' + 'body': 'console.log $0' + 'warn': + 'prefix': 'warn' + 'body': 'console.warn $0' + 'error': + 'prefix': 'error' + 'body': 'console.error $0' + 'require': + 'prefix': 'req' + 'body': '${1:sys} $3= require \'${2:${1:sys}}\'$4' + 'Describe block': + 'prefix': 'de', + 'body': 'describe "${1:description}", ->\n ${2:body}' + 'It block': + 'prefix': 'i', + 'body': 'it "$1", ->\n $2' + 'Before each': + 'prefix': 'be', + 'body': 'beforeEach ->\n $1' + 'After each': + 'prefix': 'af', + 'body': 'afterEach ->\n $1' + 'Expectation': + 'prefix': 'ex', + 'body': 'expect($1).to$2' + 'Range array': + 'prefix': 'ra', + 'body': '[[$1, $2], [$3, $4]]' + 'Point array': + 'prefix': 'pt', + 'body': '[$1, $2]' + 'Key-value pair': + 'prefix': 'kv', + 'body': '${1:\'${2:key}\'}: ${3:value}' + 'Create Jasmine spy': + 'prefix': 'spy', + 'body': 'jasmine.createSpy(\'${1:description}\')$2' +'.string.quoted.double.coffee:not(.string .source), .string.quoted.double.heredoc.coffee:not(.string .source)': + 'Interpolated Code': + 'prefix': '#' + 'body': '#{$1}$2' diff --git a/packages/language-coffee-script/spec/coffee-script-literate-spec.coffee b/packages/language-coffee-script/spec/coffee-script-literate-spec.coffee new file mode 100644 index 000000000..1ed4d721b --- /dev/null +++ b/packages/language-coffee-script/spec/coffee-script-literate-spec.coffee @@ -0,0 +1,122 @@ +describe "CoffeeScript (Literate) grammar", -> + grammar = null + + beforeEach -> + waitsForPromise -> + atom.packages.activatePackage("language-coffee-script") + + runs -> + grammar = atom.grammars.grammarForScopeName("source.litcoffee") + + it "parses the grammar", -> + expect(grammar).toBeTruthy() + expect(grammar.scopeName).toBe "source.litcoffee" + + it "recognizes a code block after a list", -> + tokens = grammar.tokenizeLines ''' + 1. Example + 2. 
List + + 1 + 2 + ''' + expect(tokens[3][1]).toEqual value: "1", scopes: ["source.litcoffee", "markup.raw.block.markdown", "constant.numeric.decimal.coffee"] + + describe "firstLineMatch", -> + it "recognises interpreter directives", -> + valid = """ + #!/usr/local/bin/coffee --no-header --literate -w + #!/usr/local/bin/coffee -l + #!/usr/local/bin/env coffee --literate -w + """ + for line in valid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull() + + invalid = """ + #!/usr/local/bin/coffee --no-head -literate -w + #!/usr/local/bin/coffee --wl + #!/usr/local/bin/env coffee --illiterate -w=l + """ + for line in invalid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull() + + it "recognises Emacs modelines", -> + valid = """ + #-*- litcoffee -*- + #-*- mode: litcoffee -*- + /* -*-litcoffee-*- */ + // -*- litcoffee -*- + /* -*- mode:LITCOFFEE -*- */ + // -*- font:bar;mode:LitCoffee -*- + // -*- font:bar;mode:litcoffee;foo:bar; -*- + // -*-font:mode;mode:litcoffee-*- + // -*- foo:bar mode: litcoffee bar:baz -*- + " -*-foo:bar;mode:litcoffee;bar:foo-*- "; + " -*-font-mode:foo;mode:LITcofFEE;foo-bar:quux-*-" + "-*-font:x;foo:bar; mode : litCOFFEE; bar:foo;foooooo:baaaaar;fo:ba;-*-"; + "-*- font:x;foo : bar ; mode : LiTcOFFEe ; bar : foo ; foooooo:baaaaar;fo:ba-*-"; + """ + for line in valid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull() + + invalid = """ + /* --*litcoffee-*- */ + /* -*-- litcoffee -*- + /* -*- -- litcoffee -*- + /* -*- LITCOFFEE -;- -*- + // -*- itsLitCoffeeFam -*- + // -*- litcoffee; -*- + // -*- litcoffee-stuff -*- + /* -*- model:litcoffee -*- + /* -*- indent-mode:litcoffee -*- + // -*- font:mode;litcoffee -*- + // -*- mode: -*- litcoffee + // -*- mode: burnt-because-litcoffee -*- + // -*-font:mode;mode:litcoffee--*- + """ + for line in invalid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull() + + it "recognises Vim modelines", -> + valid = """ + vim: se filetype=litcoffee: + # vim: se ft=litcoffee: + # vim: set ft=LITCOFFEE: + # vim: set filetype=litcoffee: + # vim: ft=LITCOFFEE + # vim: syntax=litcoffee + # vim: se syntax=litcoffee: + # ex: syntax=litcoffee + # vim:ft=LitCoffee + # vim600: ft=litcoffee + # vim>600: set ft=litcoffee: + # vi:noai:sw=3 ts=6 ft=litcoffee + # vi::::::::::noai:::::::::::: ft=litcoffee + # vim:ts=4:sts=4:sw=4:noexpandtab:ft=LITCOFFEE + # vi:: noai : : : : sw =3 ts =6 ft =litCoffee + # vim: ts=4: pi sts=4: ft=litcoffee: noexpandtab: sw=4: + # vim: ts=4 sts=4: ft=litcoffee noexpandtab: + # vim:noexpandtab sts=4 ft=LitCOffEE ts=4 + # vim:noexpandtab:ft=litcoffee + # vim:ts=4:sts=4 ft=litcoffee:noexpandtab:\x20 + # vim:noexpandtab titlestring=hi\|there\\\\ ft=litcoffee ts=4 + """ + for line in valid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull() + + invalid = """ + ex: se filetype=litcoffee: + _vi: se filetype=litcoffee: + vi: se filetype=litcoffee + # vim set ft=illitcoffee + # vim: soft=litcoffee + # vim: clean-syntax=litcoffee: + # vim set ft=litcoffee: + # vim: setft=litcoffee: + # vim: se ft=litcoffee backupdir=tmp + # vim: set ft=LITCOFFEE set cmdheight=1 + # vim:noexpandtab sts:4 ft:litcoffee ts:4 + # vim:noexpandtab titlestring=hi\\|there\\ ft=litcoffee ts=4 + # vim:noexpandtab titlestring=hi\\|there\\\\\\ ft=litcoffee ts=4 + """ + for line in invalid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull() diff --git 
a/packages/language-coffee-script/spec/coffee-script-spec.coffee b/packages/language-coffee-script/spec/coffee-script-spec.coffee new file mode 100644 index 000000000..138959779 --- /dev/null +++ b/packages/language-coffee-script/spec/coffee-script-spec.coffee @@ -0,0 +1,1489 @@ +fs = require 'fs' +path = require 'path' + +describe "CoffeeScript grammar", -> + grammar = null + + beforeEach -> + waitsForPromise -> + atom.packages.activatePackage("language-coffee-script") + + runs -> + grammar = atom.grammars.grammarForScopeName("source.coffee") + + it "parses the grammar", -> + expect(grammar).toBeTruthy() + expect(grammar.scopeName).toBe "source.coffee" + + it "tokenizes classes", -> + {tokens} = grammar.tokenizeLine("class Foo") + + expect(tokens[0]).toEqual value: "class", scopes: ["source.coffee", "meta.class.coffee", "storage.type.class.coffee"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.class.coffee"] + expect(tokens[2]).toEqual value: "Foo", scopes: ["source.coffee", "meta.class.coffee", "entity.name.type.class.coffee"] + + {tokens} = grammar.tokenizeLine("class_ Foo") + expect(tokens[0]).toEqual value: "class_", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] + + {tokens} = grammar.tokenizeLine("_class Foo") + expect(tokens[0]).toEqual value: "_class", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] + + {tokens} = grammar.tokenizeLine("[class Foo]") + expect(tokens[0]).toEqual value: "[", scopes: ["source.coffee", "punctuation.definition.array.begin.bracket.square.coffee"] + expect(tokens[1]).toEqual value: "class", scopes: ["source.coffee", "meta.class.coffee", "storage.type.class.coffee"] + expect(tokens[2]).toEqual value: " ", scopes: ["source.coffee", "meta.class.coffee"] + expect(tokens[3]).toEqual value: "Foo", scopes: ["source.coffee", "meta.class.coffee", "entity.name.type.class.coffee"] + expect(tokens[4]).toEqual value: "]", scopes: ["source.coffee", "punctuation.definition.array.end.bracket.square.coffee"] + + {tokens} = grammar.tokenizeLine("bar(class Foo)") + expect(tokens[0]).toEqual value: "bar", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] + expect(tokens[1]).toEqual value: "(", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "punctuation.definition.arguments.begin.bracket.round.coffee"] + expect(tokens[2]).toEqual value: "class", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "meta.class.coffee", "storage.type.class.coffee"] + expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "meta.class.coffee"] + expect(tokens[4]).toEqual value: "Foo", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "meta.class.coffee", "entity.name.type.class.coffee"] + expect(tokens[5]).toEqual value: ")", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "punctuation.definition.arguments.end.bracket.round.coffee"] + + it "tokenizes named subclasses", -> + {tokens} = grammar.tokenizeLine("class Foo extends Bar") + + expect(tokens[0]).toEqual value: "class", scopes: ["source.coffee", "meta.class.coffee", "storage.type.class.coffee"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.class.coffee"] + expect(tokens[2]).toEqual value: "Foo", scopes: ["source.coffee", "meta.class.coffee", "entity.name.type.class.coffee"] + 
expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee", "meta.class.coffee"] + expect(tokens[4]).toEqual value: "extends", scopes: ["source.coffee", "meta.class.coffee", "keyword.control.inheritance.coffee"] + expect(tokens[5]).toEqual value: " ", scopes: ["source.coffee", "meta.class.coffee"] + expect(tokens[6]).toEqual value: "Bar", scopes: ["source.coffee", "meta.class.coffee", "entity.other.inherited-class.coffee"] + + it "tokenizes anonymous subclasses", -> + {tokens} = grammar.tokenizeLine("class extends Foo") + + expect(tokens[0]).toEqual value: "class", scopes: ["source.coffee", "meta.class.coffee", "storage.type.class.coffee"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.class.coffee"] + expect(tokens[2]).toEqual value: "extends", scopes: ["source.coffee", "meta.class.coffee", "keyword.control.inheritance.coffee"] + expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee", "meta.class.coffee"] + expect(tokens[4]).toEqual value: "Foo", scopes: ["source.coffee", "meta.class.coffee", "entity.other.inherited-class.coffee"] + + it "tokenizes instantiated anonymous classes", -> + {tokens} = grammar.tokenizeLine("new class") + + expect(tokens[0]).toEqual value: "new", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "keyword.operator.new.coffee"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.class.instance.constructor.coffee"] + expect(tokens[2]).toEqual value: "class", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "storage.type.class.coffee"] + + it "tokenizes instantiated named classes", -> + {tokens} = grammar.tokenizeLine("new class Foo") + + expect(tokens[0]).toEqual value: "new", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "keyword.operator.new.coffee"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.class.instance.constructor.coffee"] + expect(tokens[2]).toEqual value: "class", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "storage.type.class.coffee"] + expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee", "meta.class.instance.constructor.coffee"] + expect(tokens[4]).toEqual value: "Foo", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "entity.name.type.instance.coffee"] + + {tokens} = grammar.tokenizeLine("new Foo") + + expect(tokens[0]).toEqual value: "new", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "keyword.operator.new.coffee"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.class.instance.constructor.coffee"] + expect(tokens[2]).toEqual value: "Foo", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "entity.name.type.instance.coffee"] + + it "tokenizes class names that start with `class` correctly", -> + {tokens} = grammar.tokenizeLine("new classTest") + + expect(tokens[0]).toEqual value: "new", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "keyword.operator.new.coffee"] + expect(tokens[2]).toEqual value: "classTest", scopes: ["source.coffee", "meta.class.instance.constructor.coffee", "entity.name.type.instance.coffee"] + + it "tokenizes comments", -> + {tokens} = grammar.tokenizeLine("# I am a comment") + + expect(tokens[0]).toEqual value: "#", scopes: ["source.coffee", "comment.line.number-sign.coffee", "punctuation.definition.comment.coffee"] + expect(tokens[1]).toEqual value: " I am a comment", scopes: ["source.coffee", "comment.line.number-sign.coffee"] + + {tokens} = 
grammar.tokenizeLine("\#{Comment}") + + expect(tokens[0]).toEqual value: "#", scopes: ["source.coffee", "comment.line.number-sign.coffee", "punctuation.definition.comment.coffee"] + expect(tokens[1]).toEqual value: "{Comment}", scopes: ["source.coffee", "comment.line.number-sign.coffee"] + + it "tokenizes block comments", -> + lines = grammar.tokenizeLines """ + ### I am a block comment + Very blocky + Until here + ### + """ + expect(lines[0][0]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee'] + expect(lines[0][1]).toEqual value: ' I am a block comment', scopes: ['source.coffee', 'comment.block.coffee'] + expect(lines[2][0]).toEqual value: 'Until here', scopes: ['source.coffee', 'comment.block.coffee'] + expect(lines[3][0]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee'] + + {tokens} = grammar.tokenizeLine "identity = ###::<T>### (value ###: T ###) ###: T ### ->" + expect(tokens[0]).toEqual value: 'identity', scopes: ['source.coffee', 'variable.assignment.coffee'] + expect(tokens[4]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee'] + expect(tokens[5]).toEqual value: '::<T>', scopes: ['source.coffee', 'comment.block.coffee'] + expect(tokens[6]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee'] + expect(tokens[9]).toEqual value: 'value ', scopes: ['source.coffee'] # TODO: These scopes are incorrect and should be fixed + expect(tokens[10]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee'] + expect(tokens[11]).toEqual value: ': T ', scopes: ['source.coffee', 'comment.block.coffee'] + expect(tokens[12]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee'] + expect(tokens[14]).toEqual value: ' ', scopes: ['source.coffee'] # TODO: These scopes are incorrect and should be fixed + expect(tokens[15]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee'] + expect(tokens[16]).toEqual value: ': T ', scopes: ['source.coffee', 'comment.block.coffee'] + expect(tokens[17]).toEqual value: '###', scopes: ['source.coffee', 'comment.block.coffee', 'punctuation.definition.comment.coffee'] + expect(tokens[19]).toEqual value: '->', scopes: ['source.coffee', 'meta.function.inline.coffee', 'storage.type.function.coffee'] + + it "tokenizes annotations in block comments", -> + lines = grammar.tokenizeLines """ + ### + @foo - food + @bar - bart + """ + + expect(lines[1][0]).toEqual value: ' ', scopes: ["source.coffee", "comment.block.coffee"] + expect(lines[1][1]).toEqual value: '@foo', scopes: ["source.coffee", "comment.block.coffee", "storage.type.annotation.coffee"] + expect(lines[2][0]).toEqual value: '@bar', scopes: ["source.coffee", "comment.block.coffee", "storage.type.annotation.coffee"] + + describe "numbers", -> + it "tokenizes hexadecimals", -> + {tokens} = grammar.tokenizeLine('0x1D306') + expect(tokens[0]).toEqual value: '0x1D306', scopes: ['source.coffee', 'constant.numeric.hex.coffee'] + + {tokens} = grammar.tokenizeLine('0X1D306') + expect(tokens[0]).toEqual value: '0X1D306', scopes: ['source.coffee', 'constant.numeric.hex.coffee'] + + it "tokenizes binary literals", -> + {tokens} = grammar.tokenizeLine('0b011101110111010001100110') + expect(tokens[0]).toEqual value: 
'0b011101110111010001100110', scopes: ['source.coffee', 'constant.numeric.binary.coffee'] + + {tokens} = grammar.tokenizeLine('0B011101110111010001100110') + expect(tokens[0]).toEqual value: '0B011101110111010001100110', scopes: ['source.coffee', 'constant.numeric.binary.coffee'] + + it "tokenizes octal literals", -> + {tokens} = grammar.tokenizeLine('0o1411') + expect(tokens[0]).toEqual value: '0o1411', scopes: ['source.coffee', 'constant.numeric.octal.coffee'] + + {tokens} = grammar.tokenizeLine('0O1411') + expect(tokens[0]).toEqual value: '0O1411', scopes: ['source.coffee', 'constant.numeric.octal.coffee'] + + {tokens} = grammar.tokenizeLine('0010') + expect(tokens[0]).toEqual value: '0010', scopes: ['source.coffee', 'constant.numeric.octal.coffee'] + + it "tokenizes decimals", -> + {tokens} = grammar.tokenizeLine('1234') + expect(tokens[0]).toEqual value: '1234', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] + + {tokens} = grammar.tokenizeLine('5e-10') + expect(tokens[0]).toEqual value: '5e-10', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] + + {tokens} = grammar.tokenizeLine('5E+5') + expect(tokens[0]).toEqual value: '5E+5', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] + + {tokens} = grammar.tokenizeLine('9.') + expect(tokens[0]).toEqual value: '9', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'constant.numeric.decimal.coffee', 'punctuation.separator.decimal.period.coffee'] + + {tokens} = grammar.tokenizeLine('.9') + expect(tokens[0]).toEqual value: '.', scopes: ['source.coffee', 'constant.numeric.decimal.coffee', 'punctuation.separator.decimal.period.coffee'] + expect(tokens[1]).toEqual value: '9', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] + + {tokens} = grammar.tokenizeLine('9.9') + expect(tokens[0]).toEqual value: '9', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'constant.numeric.decimal.coffee', 'punctuation.separator.decimal.period.coffee'] + expect(tokens[2]).toEqual value: '9', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] + + {tokens} = grammar.tokenizeLine('.1e-23') + expect(tokens[0]).toEqual value: '.', scopes: ['source.coffee', 'constant.numeric.decimal.coffee', 'punctuation.separator.decimal.period.coffee'] + expect(tokens[1]).toEqual value: '1e-23', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] + + {tokens} = grammar.tokenizeLine('1.E3') + expect(tokens[0]).toEqual value: '1', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'constant.numeric.decimal.coffee', 'punctuation.separator.decimal.period.coffee'] + expect(tokens[2]).toEqual value: 'E3', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] + + it "does not tokenize numbers that are part of a variable", -> + {tokens} = grammar.tokenizeLine('hi$1') + expect(tokens[0]).toEqual value: 'hi$1', scopes: ['source.coffee'] + + {tokens} = grammar.tokenizeLine('hi_1') + expect(tokens[0]).toEqual value: 'hi_1', scopes: ['source.coffee'] + + it "tokenizes variable assignments", -> + {tokens} = grammar.tokenizeLine("something = b") + expect(tokens[0]).toEqual value: "something", scopes: ["source.coffee", "variable.assignment.coffee"] + expect(tokens[2]).toEqual value: "=", scopes: ["source.coffee", "keyword.operator.assignment.coffee"] + expect(tokens[3]).toEqual value: " b", scopes: 
["source.coffee"] + + {tokens} = grammar.tokenizeLine("something : b") + expect(tokens[0]).toEqual value: "something", scopes: ["source.coffee", "variable.assignment.coffee"] + expect(tokens[2]).toEqual value: ":", scopes: ["source.coffee", "keyword.operator.assignment.coffee"] + expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine("a and= b") + expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] + expect(tokens[2]).toEqual value: "and=", scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] + expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] + + # Should NOT be tokenized as and= + {tokens} = grammar.tokenizeLine("operand=true") + expect(tokens[0]).toEqual value: "operand", scopes: ["source.coffee", "variable.assignment.coffee"] + expect(tokens[1]).toEqual value: "=", scopes: ["source.coffee", "keyword.operator.assignment.coffee"] + + {tokens} = grammar.tokenizeLine("a or= b") + expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] + expect(tokens[2]).toEqual value: "or=", scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] + expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] + + # Should NOT be tokenized as or= + {tokens} = grammar.tokenizeLine("editor=false") + expect(tokens[0]).toEqual value: "editor", scopes: ["source.coffee", "variable.assignment.coffee"] + expect(tokens[1]).toEqual value: "=", scopes: ["source.coffee", "keyword.operator.assignment.coffee"] + + {tokens} = grammar.tokenizeLine("a -= b") + expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] + expect(tokens[2]).toEqual value: "-=", scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] + expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine("a += b") + expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] + expect(tokens[2]).toEqual value: "+=", scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] + expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine("a /= b") + expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] + expect(tokens[2]).toEqual value: "/=", scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] + expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine("a &= b") + expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] + expect(tokens[2]).toEqual value: "&=", scopes: ["source.coffee", "keyword.operator.assignment.compound.bitwise.coffee"] + expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine("a %= b") + expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] + expect(tokens[2]).toEqual value: "%=", scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] + expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine("a *= b") + expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] + expect(tokens[2]).toEqual value: "*=", scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] + expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] + + {tokens} = 
grammar.tokenizeLine("a ?= b") + expect(tokens[0]).toEqual value: "a", scopes: ["source.coffee", "variable.assignment.coffee"] + expect(tokens[2]).toEqual value: "?=", scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] + expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine("a == b") + expect(tokens[0]).toEqual value: "a ", scopes: ["source.coffee"] + expect(tokens[1]).toEqual value: "==", scopes: ["source.coffee", "keyword.operator.comparison.coffee"] + expect(tokens[2]).toEqual value: " b", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine("false == b") + expect(tokens[0]).toEqual value: "false", scopes: ["source.coffee", "constant.language.boolean.false.coffee"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[2]).toEqual value: "==", scopes: ["source.coffee", "keyword.operator.comparison.coffee"] + expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine("true == b") + expect(tokens[0]).toEqual value: "true", scopes: ["source.coffee", "constant.language.boolean.true.coffee"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[2]).toEqual value: "==", scopes: ["source.coffee", "keyword.operator.comparison.coffee"] + expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine("null == b") + expect(tokens[0]).toEqual value: "null", scopes: ["source.coffee", "constant.language.null.coffee"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[2]).toEqual value: "==", scopes: ["source.coffee", "keyword.operator.comparison.coffee"] + expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine("this == b") + expect(tokens[0]).toEqual value: "this", scopes: ["source.coffee", "variable.language.this.coffee"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[2]).toEqual value: "==", scopes: ["source.coffee", "keyword.operator.comparison.coffee"] + expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] + + it "tokenizes compound operators properly", -> + assignmentOperators = ["and=", "or=", "&&=", "||=", "/=", "*=", "%=", "+=", "-="] + bitwiseOperators = ["<<=", ">>=", ">>>=", "&=", "|=", "^="] + comparisonOperators = ["==", "!=", "<=", ">="] + + for assignmentOperator in assignmentOperators + {tokens} = grammar.tokenizeLine(assignmentOperator) + expect(tokens[0]).toEqual value: assignmentOperator, scopes: ["source.coffee", "keyword.operator.assignment.compound.coffee"] + + for bitwiseOperator in bitwiseOperators + {tokens} = grammar.tokenizeLine(bitwiseOperator) + expect(tokens[0]).toEqual value: bitwiseOperator, scopes: ["source.coffee", "keyword.operator.assignment.compound.bitwise.coffee"] + + for comparisonOperator in comparisonOperators + {tokens} = grammar.tokenizeLine(comparisonOperator) + expect(tokens[0]).toEqual value: comparisonOperator, scopes: ["source.coffee", "keyword.operator.comparison.coffee"] + + it "tokenizes operators properly", -> + logicalOperators = ["!", "&&", "||", "and", "or", "not"] + bitwiseOperators = ["^", "~", "&", "|"] + comparisonOperators = ["<", ">", "is", "isnt"] + decrementOperators = ["--"] + incrementOperators = ["++"] + splatOperators = ["..."] + existentialOperators = ["?"] + operators = ["%", "*", "/", "-", "+"] + keywords = ["delete", "instanceof", "new", "typeof"] + + for logicalOperator in logicalOperators + 
{tokens} = grammar.tokenizeLine(logicalOperator) + expect(tokens[0]).toEqual value: logicalOperator, scopes: ["source.coffee", "keyword.operator.logical.coffee"] + + for bitwiseOperator in bitwiseOperators + {tokens} = grammar.tokenizeLine(bitwiseOperator) + expect(tokens[0]).toEqual value: bitwiseOperator, scopes: ["source.coffee", "keyword.operator.bitwise.coffee"] + + for comparisonOperator in comparisonOperators + {tokens} = grammar.tokenizeLine(comparisonOperator) + expect(tokens[0]).toEqual value: comparisonOperator, scopes: ["source.coffee", "keyword.operator.comparison.coffee"] + + for decrementOperator in decrementOperators + {tokens} = grammar.tokenizeLine(decrementOperator) + expect(tokens[0]).toEqual value: decrementOperator, scopes: ["source.coffee", "keyword.operator.decrement.coffee"] + + for incrementOperator in incrementOperators + {tokens} = grammar.tokenizeLine(incrementOperator) + expect(tokens[0]).toEqual value: incrementOperator, scopes: ["source.coffee", "keyword.operator.increment.coffee"] + + for splatOperator in splatOperators + {tokens} = grammar.tokenizeLine(splatOperator) + expect(tokens[0]).toEqual value: splatOperator, scopes: ["source.coffee", "keyword.operator.splat.coffee"] + + for existentialOperator in existentialOperators + {tokens} = grammar.tokenizeLine(existentialOperator) + expect(tokens[0]).toEqual value: existentialOperator, scopes: ["source.coffee", "keyword.operator.existential.coffee"] + + for operator in operators + {tokens} = grammar.tokenizeLine(operator) + expect(tokens[0]).toEqual value: operator, scopes: ["source.coffee", "keyword.operator.coffee"] + + for keyword in keywords + {tokens} = grammar.tokenizeLine(keyword) + expect(tokens[0]).toEqual value: keyword, scopes: ["source.coffee", "keyword.operator.#{keyword}.coffee"] + + it "does not tokenize non-operators as operators", -> + notOperators = ["(/=", "-->", "=>", "->"] + + for notOperator in notOperators + {tokens} = grammar.tokenizeLine(notOperator) + expect(tokens[0]).not.toEqual value: notOperator, scopes: ["source.coffee", "keyword.operator.coffee"] + + describe "properties", -> + it "tokenizes properties", -> + {tokens} = grammar.tokenizeLine('obj.property') + expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] + expect(tokens[2]).toEqual value: 'property', scopes: ['source.coffee', 'variable.other.property.coffee'] + + {tokens} = grammar.tokenizeLine('obj.property instanceof Object') + expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] + expect(tokens[2]).toEqual value: 'property', scopes: ['source.coffee', 'variable.other.property.coffee'] + + {tokens} = grammar.tokenizeLine('obj.property.property') + expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] + expect(tokens[2]).toEqual value: 'property', scopes: ['source.coffee', 'variable.other.object.property.coffee'] + + {tokens} = grammar.tokenizeLine('obj.Property') + expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 
'punctuation.separator.property.period.coffee'] + expect(tokens[2]).toEqual value: 'Property', scopes: ['source.coffee', 'variable.other.property.coffee'] + + {tokens} = grammar.tokenizeLine('obj.prop1?.prop2?') + expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] + expect(tokens[2]).toEqual value: 'prop1', scopes: ['source.coffee', 'variable.other.object.property.coffee'] + expect(tokens[3]).toEqual value: '?', scopes: ['source.coffee', 'keyword.operator.existential.coffee'] + expect(tokens[4]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] + expect(tokens[5]).toEqual value: 'prop2', scopes: ['source.coffee', 'variable.other.property.coffee'] + expect(tokens[6]).toEqual value: '?', scopes: ['source.coffee', 'keyword.operator.existential.coffee'] + + {tokens} = grammar.tokenizeLine('obj.$abc$') + expect(tokens[2]).toEqual value: '$abc$', scopes: ['source.coffee', 'variable.other.property.coffee'] + + {tokens} = grammar.tokenizeLine('obj.$$') + expect(tokens[2]).toEqual value: '$$', scopes: ['source.coffee', 'variable.other.property.coffee'] + + {tokens} = grammar.tokenizeLine('a().b') + expect(tokens[2]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] + expect(tokens[3]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] + expect(tokens[4]).toEqual value: 'b', scopes: ['source.coffee', 'variable.other.property.coffee'] + + {tokens} = grammar.tokenizeLine('a.123illegal') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] + expect(tokens[2]).toEqual value: '123illegal', scopes: ['source.coffee', 'invalid.illegal.identifier.coffee'] + + it "tokenizes constant properties", -> + {tokens} = grammar.tokenizeLine('obj.MY_CONSTANT') + expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] + expect(tokens[2]).toEqual value: 'MY_CONSTANT', scopes: ['source.coffee', 'constant.other.property.coffee'] + + {tokens} = grammar.tokenizeLine('obj.MY_CONSTANT.prop') + expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] + expect(tokens[2]).toEqual value: 'MY_CONSTANT', scopes: ['source.coffee', 'constant.other.object.property.coffee'] + + {tokens} = grammar.tokenizeLine('a.C') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'variable.other.object.coffee'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'punctuation.separator.property.period.coffee'] + expect(tokens[2]).toEqual value: 'C', scopes: ['source.coffee', 'constant.other.property.coffee'] + + it "tokenizes objects, methods, and properties using :: prototype syntax", -> + {tokens} = grammar.tokenizeLine("Foo::") + expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] + expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] + + 
{tokens} = grammar.tokenizeLine("Foo::true") + expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] + expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] + expect(tokens[2]).toEqual value: "true", scopes: ["source.coffee", "variable.other.property.coffee"] + + {tokens} = grammar.tokenizeLine("Foo::on") + expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] + expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] + expect(tokens[2]).toEqual value: "on", scopes: ["source.coffee", "variable.other.property.coffee"] + + {tokens} = grammar.tokenizeLine("Foo::yes") + expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] + expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] + expect(tokens[2]).toEqual value: "yes", scopes: ["source.coffee", "variable.other.property.coffee"] + + {tokens} = grammar.tokenizeLine("Foo::false") + expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] + expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] + expect(tokens[2]).toEqual value: "false", scopes: ["source.coffee", "variable.other.property.coffee"] + + {tokens} = grammar.tokenizeLine("Foo::off") + expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] + expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] + expect(tokens[2]).toEqual value: "off", scopes: ["source.coffee", "variable.other.property.coffee"] + + {tokens} = grammar.tokenizeLine("Foo::no") + expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] + expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] + expect(tokens[2]).toEqual value: "no", scopes: ["source.coffee", "variable.other.property.coffee"] + + {tokens} = grammar.tokenizeLine("Foo::null") + expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] + expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] + expect(tokens[2]).toEqual value: "null", scopes: ["source.coffee", "variable.other.property.coffee"] + + {tokens} = grammar.tokenizeLine("Foo::extends") + expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] + expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] + expect(tokens[2]).toEqual value: "extends", scopes: ["source.coffee", "variable.other.property.coffee"] + + {tokens} = grammar.tokenizeLine("Foo :: something :: else") + expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[2]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] + expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[4]).toEqual value: "something", scopes: ["source.coffee", "variable.other.object.property.coffee"] + expect(tokens[5]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[6]).toEqual value: "::", scopes: ["source.coffee", "keyword.operator.prototype.coffee"] + expect(tokens[7]).toEqual 
value: " ", scopes: ["source.coffee"] + expect(tokens[8]).toEqual value: "else", scopes: ["source.coffee", "variable.other.property.coffee"] + + {tokens} = grammar.tokenizeLine("Foo::toString()") + expect(tokens[0]).toEqual value: "Foo", scopes: ["source.coffee", "variable.other.object.coffee"] + expect(tokens[1]).toEqual value: "::", scopes: ["source.coffee", "meta.method-call.coffee", "keyword.operator.prototype.coffee"] + expect(tokens[2]).toEqual value: "toString", scopes: ["source.coffee", "meta.method-call.coffee", "support.function.coffee"] + + describe "variables", -> + it "tokenizes 'this'", -> + {tokens} = grammar.tokenizeLine('this') + expect(tokens[0]).toEqual value: 'this', scopes: ['source.coffee', 'variable.language.this.coffee'] + + {tokens} = grammar.tokenizeLine('this.obj.prototype = new El()') + expect(tokens[0]).toEqual value: 'this', scopes: ['source.coffee', 'variable.language.this.coffee'] + + {tokens} = grammar.tokenizeLine('$this') + expect(tokens[0]).toEqual value: '$this', scopes: ['source.coffee'] + + {tokens} = grammar.tokenizeLine('this$') + expect(tokens[0]).toEqual value: 'this$', scopes: ['source.coffee'] + + it "tokenizes 'super'", -> + {tokens} = grammar.tokenizeLine('super') + expect(tokens[0]).toEqual value: 'super', scopes: ['source.coffee', 'variable.language.super.coffee'] + + it "tokenizes 'arguments'", -> + {tokens} = grammar.tokenizeLine('arguments') + expect(tokens[0]).toEqual value: 'arguments', scopes: ['source.coffee', 'variable.language.arguments.coffee'] + + {tokens} = grammar.tokenizeLine('arguments[0]') + expect(tokens[0]).toEqual value: 'arguments', scopes: ['source.coffee', 'variable.language.arguments.coffee'] + + {tokens} = grammar.tokenizeLine('arguments.length') + expect(tokens[0]).toEqual value: 'arguments', scopes: ['source.coffee', 'variable.language.arguments.coffee'] + + it "tokenizes illegal identifiers", -> + {tokens} = grammar.tokenizeLine('0illegal') + expect(tokens[0]).toEqual value: '0illegal', scopes: ['source.coffee', 'invalid.illegal.identifier.coffee'] + + {tokens} = grammar.tokenizeLine('123illegal') + expect(tokens[0]).toEqual value: '123illegal', scopes: ['source.coffee', 'invalid.illegal.identifier.coffee'] + + {tokens} = grammar.tokenizeLine('123$illegal') + expect(tokens[0]).toEqual value: '123$illegal', scopes: ['source.coffee', 'invalid.illegal.identifier.coffee'] + + describe "objects", -> + it "tokenizes them", -> + {tokens} = grammar.tokenizeLine('obj.prop') + expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] + + {tokens} = grammar.tokenizeLine('$abc$.prop') + expect(tokens[0]).toEqual value: '$abc$', scopes: ['source.coffee', 'variable.other.object.coffee'] + + {tokens} = grammar.tokenizeLine('$$.prop') + expect(tokens[0]).toEqual value: '$$', scopes: ['source.coffee', 'variable.other.object.coffee'] + + {tokens} = grammar.tokenizeLine('obj?.prop') + expect(tokens[0]).toEqual value: 'obj', scopes: ['source.coffee', 'variable.other.object.coffee'] + expect(tokens[1]).toEqual value: '?', scopes: ['source.coffee', 'keyword.operator.existential.coffee'] + + it "tokenizes illegal objects", -> + {tokens} = grammar.tokenizeLine('1.prop') + expect(tokens[0]).toEqual value: '1', scopes: ['source.coffee', 'invalid.illegal.identifier.coffee'] + + {tokens} = grammar.tokenizeLine('123.prop') + expect(tokens[0]).toEqual value: '123', scopes: ['source.coffee', 'invalid.illegal.identifier.coffee'] + + {tokens} = grammar.tokenizeLine('123a.prop') + expect(tokens[0]).toEqual 
value: '123a', scopes: ['source.coffee', 'invalid.illegal.identifier.coffee'] + + describe "arrays", -> + it "tokenizes basic arrays", -> + {tokens} = grammar.tokenizeLine('[a, "b", 3]') + expect(tokens[0]).toEqual value: '[', scopes: ['source.coffee', 'punctuation.definition.array.begin.bracket.square.coffee'] + expect(tokens[1]).toEqual value: 'a', scopes: ['source.coffee'] + expect(tokens[2]).toEqual value: ',', scopes: ['source.coffee', 'punctuation.separator.delimiter.coffee'] + expect(tokens[3]).toEqual value: ' ', scopes: ['source.coffee'] + expect(tokens[9]).toEqual value: '3', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] + expect(tokens[10]).toEqual value: ']', scopes: ['source.coffee', 'punctuation.definition.array.end.bracket.square.coffee'] + + it "tokenizes inclusive and exclusive slices", -> + {tokens} = grammar.tokenizeLine('[a..3]') + expect(tokens[0]).toEqual value: '[', scopes: ['source.coffee', 'punctuation.definition.array.begin.bracket.square.coffee'] + expect(tokens[1]).toEqual value: 'a', scopes: ['source.coffee'] + expect(tokens[2]).toEqual value: '..', scopes: ['source.coffee', 'keyword.operator.slice.inclusive.coffee'] + expect(tokens[3]).toEqual value: '3', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] + expect(tokens[4]).toEqual value: ']', scopes: ['source.coffee', 'punctuation.definition.array.end.bracket.square.coffee'] + + {tokens} = grammar.tokenizeLine('[3...b]') + expect(tokens[0]).toEqual value: '[', scopes: ['source.coffee', 'punctuation.definition.array.begin.bracket.square.coffee'] + expect(tokens[1]).toEqual value: '3', scopes: ['source.coffee', 'constant.numeric.decimal.coffee'] + expect(tokens[2]).toEqual value: '...', scopes: ['source.coffee', 'keyword.operator.slice.exclusive.coffee'] + expect(tokens[3]).toEqual value: 'b', scopes: ['source.coffee'] + expect(tokens[4]).toEqual value: ']', scopes: ['source.coffee', 'punctuation.definition.array.end.bracket.square.coffee'] + + it "verifies that regular expressions have explicit count modifiers", -> + source = fs.readFileSync(path.resolve(__dirname, '..', 'grammars', 'coffeescript.cson'), 'utf8') + expect(source.search /{,/).toEqual -1 + + source = fs.readFileSync(path.resolve(__dirname, '..', 'grammars', 'coffeescript (literate).cson'), 'utf8') + expect(source.search /{,/).toEqual -1 + + it "tokenizes embedded JavaScript", -> + waitsForPromise -> + atom.packages.activatePackage("language-javascript") + + runs -> + {tokens} = grammar.tokenizeLine("`;`") + expect(tokens[0]).toEqual value: "`", scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.begin.coffee"] + expect(tokens[1]).toEqual value: ";", scopes: ["source.coffee", "string.quoted.script.coffee", "source.js.embedded.coffee", "punctuation.terminator.statement.js"] + expect(tokens[2]).toEqual value: "`", scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.end.coffee"] + + lines = grammar.tokenizeLines """ + `var a = 1;` + a = 2 + """ + expect(lines[0][0]).toEqual value: '`', scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.begin.coffee"] + expect(lines[0][1]).toEqual value: 'var', scopes: ["source.coffee", "string.quoted.script.coffee", "source.js.embedded.coffee", "storage.type.var.js"] + expect(lines[0][6]).toEqual value: ';', scopes: ["source.coffee", "string.quoted.script.coffee", "source.js.embedded.coffee", "punctuation.terminator.statement.js"] + expect(lines[0][7]).toEqual value: '`', scopes: 
["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.end.coffee"] + expect(lines[1][0]).toEqual value: 'a', scopes: ["source.coffee", "variable.assignment.coffee"] + + {tokens} = grammar.tokenizeLine("`// comment` a = 2") + expect(tokens[0]).toEqual value: '`', scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.begin.coffee"] + expect(tokens[1]).toEqual value: '//', scopes: ["source.coffee", "string.quoted.script.coffee", "source.js.embedded.coffee", "comment.line.double-slash.js", "punctuation.definition.comment.js"] + expect(tokens[2]).toEqual value: ' comment', scopes: ["source.coffee", "string.quoted.script.coffee", "source.js.embedded.coffee", "comment.line.double-slash.js"] + expect(tokens[3]).toEqual value: '`', scopes: ["source.coffee", "string.quoted.script.coffee", "punctuation.definition.string.end.coffee"] + expect(tokens[5]).toEqual value: 'a', scopes: ["source.coffee", "variable.assignment.coffee"] + + describe "function calls", -> + it "tokenizes function calls", -> + {tokens} = grammar.tokenizeLine('functionCall()') + expect(tokens[0]).toEqual value: 'functionCall', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] + expect(tokens[1]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] + expect(tokens[2]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] + + {tokens} = grammar.tokenizeLine('functionCall(arg1, "test", {a: 123})') + expect(tokens[0]).toEqual value: 'functionCall', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] + expect(tokens[1]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] + expect(tokens[2]).toEqual value: 'arg1', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee'] + expect(tokens[3]).toEqual value: ',', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.separator.delimiter.coffee'] + expect(tokens[5]).toEqual value: '"', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.begin.coffee'] + expect(tokens[6]).toEqual value: 'test', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'string.quoted.double.coffee'] + expect(tokens[7]).toEqual value: '"', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.end.coffee'] + expect(tokens[8]).toEqual value: ',', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.separator.delimiter.coffee'] + expect(tokens[10]).toEqual value: '{', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.brace.curly.coffee'] + expect(tokens[11]).toEqual value: 'a', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'variable.assignment.coffee'] + expect(tokens[12]).toEqual value: ':', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'keyword.operator.assignment.coffee'] + expect(tokens[14]).toEqual value: '123', scopes: ['source.coffee', 
'meta.function-call.coffee', 'meta.arguments.coffee', 'constant.numeric.decimal.coffee'] + expect(tokens[15]).toEqual value: '}', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.brace.curly.coffee'] + expect(tokens[16]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] + + {tokens} = grammar.tokenizeLine('functionCall((123).toString())') + expect(tokens[1]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] + expect(tokens[2]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.brace.round.coffee'] + expect(tokens[3]).toEqual value: '123', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'constant.numeric.decimal.coffee'] + expect(tokens[4]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.brace.round.coffee'] + expect(tokens[9]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] + + {tokens} = grammar.tokenizeLine('$abc$()') + expect(tokens[0]).toEqual value: '$abc$', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] + + {tokens} = grammar.tokenizeLine('$$()') + expect(tokens[0]).toEqual value: '$$', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] + + {tokens} = grammar.tokenizeLine('ABC()') + expect(tokens[0]).toEqual value: 'ABC', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] + + {tokens} = grammar.tokenizeLine('$ABC$()') + expect(tokens[0]).toEqual value: '$ABC$', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] + + {tokens} = grammar.tokenizeLine('@$()') + expect(tokens[0]).toEqual value: '@', scopes: ['source.coffee', 'meta.function-call.coffee', 'variable.other.readwrite.instance.coffee'] + expect(tokens[1]).toEqual value: '$', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] + + {tokens} = grammar.tokenizeLine('functionCall arg1, "test", {a: 123}') + expect(tokens[0]).toEqual value: 'functionCall', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] + expect(tokens[2]).toEqual value: 'arg1', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee'] + expect(tokens[3]).toEqual value: ',', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.separator.delimiter.coffee'] + expect(tokens[5]).toEqual value: '"', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.begin.coffee'] + expect(tokens[6]).toEqual value: 'test', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'string.quoted.double.coffee'] + expect(tokens[7]).toEqual value: '"', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.end.coffee'] + expect(tokens[8]).toEqual value: ',', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.separator.delimiter.coffee'] + expect(tokens[10]).toEqual value: 
'{', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.brace.curly.coffee'] + expect(tokens[11]).toEqual value: 'a', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'variable.assignment.coffee'] + expect(tokens[12]).toEqual value: ':', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'keyword.operator.assignment.coffee'] + expect(tokens[14]).toEqual value: '123', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'constant.numeric.decimal.coffee'] + expect(tokens[15]).toEqual value: '}', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.brace.curly.coffee'] + + {tokens} = grammar.tokenizeLine("foo bar") + expect(tokens[0]).toEqual value: "foo", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.function-call.coffee"] + expect(tokens[2]).toEqual value: "bar", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee"] + + {tokens} = grammar.tokenizeLine("foo not food") + expect(tokens[0]).toEqual value: "foo", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.function-call.coffee"] + expect(tokens[2]).toEqual value: "not", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "keyword.operator.logical.coffee"] + expect(tokens[3]).toEqual value: " food", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee"] + + {tokens} = grammar.tokenizeLine("eat food for food in foods") + expect(tokens[0]).toEqual value: "eat", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] + expect(tokens[2]).toEqual value: "food", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee"] + expect(tokens[4]).toEqual value: "for", scopes: ["source.coffee", "keyword.control.coffee"] + expect(tokens[5]).toEqual value: " food ", scopes: ["source.coffee"] + expect(tokens[6]).toEqual value: "in", scopes: ["source.coffee", "keyword.control.coffee"] + expect(tokens[7]).toEqual value: " foods", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine("(eat food for food in get foods)") + expect(tokens[0]).toEqual value: "(", scopes: ["source.coffee", "meta.brace.round.coffee"] + expect(tokens[1]).toEqual value: "eat", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] + expect(tokens[3]).toEqual value: "food", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee"] + expect(tokens[5]).toEqual value: "for", scopes: ["source.coffee", "keyword.control.coffee"] + expect(tokens[6]).toEqual value: " food ", scopes: ["source.coffee"] + expect(tokens[7]).toEqual value: "in", scopes: ["source.coffee", "keyword.control.coffee"] + expect(tokens[9]).toEqual value: "get", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] + expect(tokens[11]).toEqual value: "foods", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee"] + expect(tokens[12]).toEqual value: ")", scopes: ["source.coffee", "meta.brace.round.coffee"] + + {tokens} = grammar.tokenizeLine("[eat food]") + expect(tokens[0]).toEqual value: "[", scopes: ["source.coffee", "punctuation.definition.array.begin.bracket.square.coffee"] + expect(tokens[1]).toEqual value: 
"eat", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] + expect(tokens[3]).toEqual value: "food", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee"] + expect(tokens[4]).toEqual value: "]", scopes: ["source.coffee", "punctuation.definition.array.end.bracket.square.coffee"] + + {tokens} = grammar.tokenizeLine("foo @bar") + expect(tokens[0]).toEqual value: "foo", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] + expect(tokens[2]).toEqual value: "@bar", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "variable.other.readwrite.instance.coffee"] + + {tokens} = grammar.tokenizeLine("@foo bar") + expect(tokens[0]).toEqual value: "@", scopes: ["source.coffee", "meta.function-call.coffee", "variable.other.readwrite.instance.coffee"] + expect(tokens[1]).toEqual value: "foo", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] + expect(tokens[3]).toEqual value: "bar", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee"] + + {tokens} = grammar.tokenizeLine("foo baz, @bar") + expect(tokens[0]).toEqual value: "foo", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] + expect(tokens[2]).toEqual value: "baz", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee"] + expect(tokens[3]).toEqual value: ",", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "punctuation.separator.delimiter.coffee"] + expect(tokens[5]).toEqual value: "@bar", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "variable.other.readwrite.instance.coffee"] + + {tokens} = grammar.tokenizeLine("$ @$") + expect(tokens[0]).toEqual value: "$", scopes: ["source.coffee", "meta.function-call.coffee", "entity.name.function.coffee"] + expect(tokens[2]).toEqual value: "@$", scopes: ["source.coffee", "meta.function-call.coffee", "meta.arguments.coffee", "variable.other.readwrite.instance.coffee"] + + it "tokenizes function calls when they are arguments", -> + {tokens} = grammar.tokenizeLine('a(b(c))') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] + expect(tokens[1]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] + expect(tokens[2]).toEqual value: 'b', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] + expect(tokens[3]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] + expect(tokens[4]).toEqual value: 'c', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee'] + expect(tokens[5]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] + expect(tokens[6]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] + + {tokens} = grammar.tokenizeLine('a b c') 
+ expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] + expect(tokens[2]).toEqual value: 'b', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] + expect(tokens[4]).toEqual value: 'c', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee'] + + it "tokenizes illegal function calls", -> + {tokens} = grammar.tokenizeLine('0illegal()') + expect(tokens[0]).toEqual value: '0illegal', scopes: ['source.coffee', 'meta.function-call.coffee', 'invalid.illegal.identifier.coffee'] + expect(tokens[1]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] + expect(tokens[2]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] + + it "tokenizes illegal arguments", -> + {tokens} = grammar.tokenizeLine('a(1a)') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] + expect(tokens[1]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] + expect(tokens[2]).toEqual value: '1a', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'invalid.illegal.identifier.coffee'] + expect(tokens[3]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] + + {tokens} = grammar.tokenizeLine('a(123a)') + expect(tokens[2]).toEqual value: '123a', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'invalid.illegal.identifier.coffee'] + + {tokens} = grammar.tokenizeLine('a(1.prop)') + expect(tokens[2]).toEqual value: '1', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'invalid.illegal.identifier.coffee'] + expect(tokens[3]).toEqual value: '.', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.separator.property.period.coffee'] + expect(tokens[4]).toEqual value: 'prop', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'variable.other.property.coffee'] + + {tokens} = grammar.tokenizeLine('a 1a') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] + expect(tokens[2]).toEqual value: '1a', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'invalid.illegal.identifier.coffee'] + + it "tokenizes function declaration as an argument", -> + {tokens} = grammar.tokenizeLine('a((p) -> return p )') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee'] + expect(tokens[1]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] + expect(tokens[2]).toEqual value: '(', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.function.inline.coffee', 'meta.parameters.coffee', 'punctuation.definition.parameters.begin.bracket.round.coffee'] + 
expect(tokens[3]).toEqual value: 'p', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.function.inline.coffee', 'meta.parameters.coffee', 'variable.parameter.function.coffee'] + expect(tokens[4]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'meta.function.inline.coffee', 'meta.parameters.coffee', 'punctuation.definition.parameters.end.bracket.round.coffee'] + expect(tokens[8]).toEqual value: 'return', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'keyword.control.coffee'] + expect(tokens[9]).toEqual value: ' p ', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee'] + expect(tokens[10]).toEqual value: ')', scopes: ['source.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] + + it "does not tokenize booleans as function calls", -> + {tokens} = grammar.tokenizeLine("false unless true") + expect(tokens[0]).toEqual value: "false", scopes: ["source.coffee", "constant.language.boolean.false.coffee"] + expect(tokens[2]).toEqual value: "unless", scopes: ["source.coffee", "keyword.control.coffee"] + expect(tokens[4]).toEqual value: "true", scopes: ["source.coffee", "constant.language.boolean.true.coffee"] + + {tokens} = grammar.tokenizeLine("true if false") + expect(tokens[0]).toEqual value: "true", scopes: ["source.coffee", "constant.language.boolean.true.coffee"] + expect(tokens[2]).toEqual value: "if", scopes: ["source.coffee", "keyword.control.coffee"] + expect(tokens[4]).toEqual value: "false", scopes: ["source.coffee", "constant.language.boolean.false.coffee"] + + it "does not tokenize comparison operators as function calls", -> + {tokens} = grammar.tokenizeLine("if a is b") + expect(tokens[1]).toEqual value: " a ", scopes: ["source.coffee"] + expect(tokens[2]).toEqual value: "is", scopes: ["source.coffee", "keyword.operator.comparison.coffee"] + expect(tokens[3]).toEqual value: " b", scopes: ["source.coffee"] + + describe "functions", -> + it "tokenizes regular functions", -> + {tokens} = grammar.tokenizeLine("foo = -> 1") + expect(tokens[0]).toEqual value: "foo", scopes: ["source.coffee", "meta.function.coffee", "entity.name.function.coffee"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] + expect(tokens[2]).toEqual value: "=", scopes: ["source.coffee", "meta.function.coffee", "keyword.operator.assignment.coffee"] + expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] + expect(tokens[4]).toEqual value: "->", scopes: ["source.coffee", "meta.function.coffee", "storage.type.function.coffee"] + expect(tokens[5]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[6]).toEqual value: "1", scopes: ["source.coffee", "constant.numeric.decimal.coffee"] + + {tokens} = grammar.tokenizeLine("@foo = -> 1") + expect(tokens[0]).toEqual value: "@", scopes: ["source.coffee", "meta.function.coffee", "entity.name.function.coffee", "variable.other.readwrite.instance.coffee"] + expect(tokens[1]).toEqual value: "foo", scopes: ["source.coffee", "meta.function.coffee", "entity.name.function.coffee"] + expect(tokens[3]).toEqual value: "=", scopes: ["source.coffee", "meta.function.coffee", "keyword.operator.assignment.coffee"] + + {tokens} = grammar.tokenizeLine("$ = => 1") + expect(tokens[0]).toEqual value: "$", scopes: ["source.coffee", "meta.function.coffee", "entity.name.function.coffee"] + 
expect(tokens[2]).toEqual value: "=", scopes: ["source.coffee", "meta.function.coffee", "keyword.operator.assignment.coffee"] + + {tokens} = grammar.tokenizeLine("foo: -> 1") + expect(tokens[0]).toEqual value: "foo", scopes: ["source.coffee", "meta.function.coffee", "entity.name.function.coffee"] + expect(tokens[1]).toEqual value: ":", scopes: ["source.coffee", "meta.function.coffee", "keyword.operator.assignment.coffee"] + expect(tokens[2]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] + expect(tokens[3]).toEqual value: "->", scopes: ["source.coffee", "meta.function.coffee", "storage.type.function.coffee"] + expect(tokens[4]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[5]).toEqual value: "1", scopes: ["source.coffee", "constant.numeric.decimal.coffee"] + + {tokens} = grammar.tokenizeLine("'quoted': (a) => true") + expect(tokens[0]).toEqual value: "'", scopes: ["source.coffee", "meta.function.coffee", "string.quoted.single.coffee", "punctuation.definition.string.begin.coffee"] + expect(tokens[1]).toEqual value: "quoted", scopes: ["source.coffee", "meta.function.coffee", "string.quoted.single.coffee", "entity.name.function.coffee"] + expect(tokens[2]).toEqual value: "'", scopes: ["source.coffee", "meta.function.coffee", "string.quoted.single.coffee", "punctuation.definition.string.end.coffee"] + expect(tokens[3]).toEqual value: ":", scopes: ["source.coffee", "meta.function.coffee", "keyword.operator.assignment.coffee"] + expect(tokens[4]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] + expect(tokens[5]).toEqual value: "(", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.begin.bracket.round.coffee"] + expect(tokens[6]).toEqual value: "a", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] + expect(tokens[7]).toEqual value: ")", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.end.bracket.round.coffee"] + expect(tokens[8]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] + expect(tokens[9]).toEqual value: "=>", scopes: ["source.coffee", "meta.function.coffee", "storage.type.function.coffee"] + expect(tokens[10]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[11]).toEqual value: "true", scopes: ["source.coffee", "constant.language.boolean.true.coffee"] + + {tokens} = grammar.tokenizeLine('"quoted": (a) -> true') + expect(tokens[0]).toEqual value: '"', scopes: ["source.coffee", "meta.function.coffee", "string.quoted.double.coffee", "punctuation.definition.string.begin.coffee"] + expect(tokens[1]).toEqual value: "quoted", scopes: ["source.coffee", "meta.function.coffee", "string.quoted.double.coffee", "entity.name.function.coffee"] + expect(tokens[2]).toEqual value: '"', scopes: ["source.coffee", "meta.function.coffee", "string.quoted.double.coffee", "punctuation.definition.string.end.coffee"] + expect(tokens[3]).toEqual value: ":", scopes: ["source.coffee", "meta.function.coffee", "keyword.operator.assignment.coffee"] + expect(tokens[4]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] + expect(tokens[5]).toEqual value: "(", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.begin.bracket.round.coffee"] + expect(tokens[6]).toEqual value: "a", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", 
"variable.parameter.function.coffee"] + expect(tokens[7]).toEqual value: ")", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.end.bracket.round.coffee"] + expect(tokens[8]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] + expect(tokens[9]).toEqual value: "->", scopes: ["source.coffee", "meta.function.coffee", "storage.type.function.coffee"] + expect(tokens[10]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[11]).toEqual value: "true", scopes: ["source.coffee", "constant.language.boolean.true.coffee"] + + {tokens} = grammar.tokenizeLine("hello: (a) -> 1") + expect(tokens[0]).toEqual value: "hello", scopes: ["source.coffee", "meta.function.coffee", "entity.name.function.coffee"] + expect(tokens[1]).toEqual value: ":", scopes: ["source.coffee", "meta.function.coffee", "keyword.operator.assignment.coffee"] + expect(tokens[2]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] + expect(tokens[3]).toEqual value: "(", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.begin.bracket.round.coffee"] + expect(tokens[4]).toEqual value: "a", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] + expect(tokens[5]).toEqual value: ")", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.end.bracket.round.coffee"] + expect(tokens[6]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee"] + expect(tokens[7]).toEqual value: "->", scopes: ["source.coffee", "meta.function.coffee", "storage.type.function.coffee"] + expect(tokens[9]).toEqual value: "1", scopes: ["source.coffee", "constant.numeric.decimal.coffee"] + + {tokens} = grammar.tokenizeLine("hello: (a, b, {c, d}, e = 'test', f = 3, g = -> 4) -> 1") + expect(tokens[0]).toEqual value: "hello", scopes: ["source.coffee", "meta.function.coffee", "entity.name.function.coffee"] + expect(tokens[1]).toEqual value: ":", scopes: ["source.coffee", "meta.function.coffee", "keyword.operator.assignment.coffee"] + expect(tokens[3]).toEqual value: "(", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.begin.bracket.round.coffee"] + expect(tokens[4]).toEqual value: "a", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] + expect(tokens[5]).toEqual value: ",", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.separator.delimiter.coffee"] + expect(tokens[6]).toEqual value: " ", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee"] + expect(tokens[7]).toEqual value: "b", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] + expect(tokens[8]).toEqual value: ",", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.separator.delimiter.coffee"] + expect(tokens[10]).toEqual value: "{", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "meta.brace.curly.coffee"] + expect(tokens[11]).toEqual value: "c", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee"] + expect(tokens[12]).toEqual value: ",", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.separator.delimiter.coffee"] + expect(tokens[13]).toEqual value: 
" d", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee"] + expect(tokens[14]).toEqual value: "}", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "meta.brace.curly.coffee"] + expect(tokens[17]).toEqual value: "e", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] + expect(tokens[19]).toEqual value: "=", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "keyword.operator.assignment.coffee"] + expect(tokens[21]).toEqual value: "'", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "string.quoted.single.coffee", "punctuation.definition.string.begin.coffee"] + expect(tokens[24]).toEqual value: ",", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.separator.delimiter.coffee"] + expect(tokens[26]).toEqual value: "f", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] + expect(tokens[30]).toEqual value: "3", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "constant.numeric.decimal.coffee"] + expect(tokens[33]).toEqual value: "g", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] + expect(tokens[35]).toEqual value: "=", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "keyword.operator.assignment.coffee"] + expect(tokens[37]).toEqual value: "->", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "meta.function.inline.coffee", "storage.type.function.coffee"] + expect(tokens[40]).toEqual value: ")", scopes: ["source.coffee", "meta.function.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.end.bracket.round.coffee"] + expect(tokens[42]).toEqual value: "->", scopes: ["source.coffee", "meta.function.coffee", "storage.type.function.coffee"] + + it "tokenizes inline functions", -> + {tokens} = grammar.tokenizeLine("-> true") + expect(tokens[0]).toEqual value: "->", scopes: ["source.coffee", "meta.function.inline.coffee", "storage.type.function.coffee"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine(" -> true") + expect(tokens[0]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[1]).toEqual value: "->", scopes: ["source.coffee", "meta.function.inline.coffee", "storage.type.function.coffee"] + expect(tokens[2]).toEqual value: " ", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine("->true") + expect(tokens[0]).toEqual value: "->", scopes: ["source.coffee", "meta.function.inline.coffee", "storage.type.function.coffee"] + + {tokens} = grammar.tokenizeLine("(arg) -> true") + expect(tokens[0]).toEqual value: "(", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.begin.bracket.round.coffee"] + expect(tokens[1]).toEqual value: "arg", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] + expect(tokens[2]).toEqual value: ")", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.end.bracket.round.coffee"] + expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee", "meta.function.inline.coffee"] + expect(tokens[4]).toEqual value: "->", scopes: ["source.coffee", "meta.function.inline.coffee", 
"storage.type.function.coffee"] + expect(tokens[5]).toEqual value: " ", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine("(arg1, arg2) -> true") + expect(tokens[0]).toEqual value: "(", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.begin.bracket.round.coffee"] + expect(tokens[1]).toEqual value: "arg1", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] + expect(tokens[2]).toEqual value: ",", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "punctuation.separator.delimiter.coffee"] + expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee"] + expect(tokens[4]).toEqual value: "arg2", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] + expect(tokens[5]).toEqual value: ")", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.end.bracket.round.coffee"] + expect(tokens[6]).toEqual value: " ", scopes: ["source.coffee", "meta.function.inline.coffee"] + expect(tokens[7]).toEqual value: "->", scopes: ["source.coffee", "meta.function.inline.coffee", "storage.type.function.coffee"] + expect(tokens[8]).toEqual value: " ", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine("( arg1, arg2 )-> true") + expect(tokens[0]).toEqual value: "(", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.begin.bracket.round.coffee"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee"] + expect(tokens[2]).toEqual value: "arg1", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] + expect(tokens[3]).toEqual value: ",", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "punctuation.separator.delimiter.coffee"] + expect(tokens[4]).toEqual value: " ", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee"] + expect(tokens[5]).toEqual value: "arg2", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "variable.parameter.function.coffee"] + expect(tokens[6]).toEqual value: " ", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee"] + expect(tokens[7]).toEqual value: ")", scopes: ["source.coffee", "meta.function.inline.coffee", "meta.parameters.coffee", "punctuation.definition.parameters.end.bracket.round.coffee"] + expect(tokens[8]).toEqual value: "->", scopes: ["source.coffee", "meta.function.inline.coffee", "storage.type.function.coffee"] + expect(tokens[9]).toEqual value: " ", scopes: ["source.coffee"] + + describe "method calls", -> + it "tokenizes method calls", -> + {tokens} = grammar.tokenizeLine('a.b(1+1)') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'variable.other.object.coffee'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'meta.method-call.coffee', 'punctuation.separator.method.period.coffee'] + expect(tokens[2]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] + expect(tokens[3]).toEqual value: '(', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 
'punctuation.definition.arguments.begin.bracket.round.coffee'] + expect(tokens[4]).toEqual value: '1', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'constant.numeric.decimal.coffee'] + expect(tokens[5]).toEqual value: '+', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'keyword.operator.coffee'] + expect(tokens[6]).toEqual value: '1', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'constant.numeric.decimal.coffee'] + expect(tokens[7]).toEqual value: ')', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee'] + + {tokens} = grammar.tokenizeLine('a . b(1+1)') + expect(tokens[2]).toEqual value: '.', scopes: ['source.coffee', 'meta.method-call.coffee', 'punctuation.separator.method.period.coffee'] + expect(tokens[4]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] + expect(tokens[5]).toEqual value: '(', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee'] + + {tokens} = grammar.tokenizeLine('a.$abc$()') + expect(tokens[2]).toEqual value: '$abc$', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] + + {tokens} = grammar.tokenizeLine('a.$$()') + expect(tokens[2]).toEqual value: '$$', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] + + {tokens} = grammar.tokenizeLine('a.b c') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'variable.other.object.coffee'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'meta.method-call.coffee', 'punctuation.separator.method.period.coffee'] + expect(tokens[2]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] + expect(tokens[3]).toEqual value: ' ', scopes: ['source.coffee', 'meta.method-call.coffee'] + expect(tokens[4]).toEqual value: 'c', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee'] + + {tokens} = grammar.tokenizeLine('(a.b c)') + expect(tokens[0]).toEqual value: '(', scopes: ['source.coffee', 'meta.brace.round.coffee'] + expect(tokens[1]).toEqual value: 'a', scopes: ['source.coffee', 'variable.other.object.coffee'] + expect(tokens[2]).toEqual value: '.', scopes: ['source.coffee', 'meta.method-call.coffee', 'punctuation.separator.method.period.coffee'] + expect(tokens[3]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] + expect(tokens[4]).toEqual value: ' ', scopes: ['source.coffee', 'meta.method-call.coffee'] + expect(tokens[5]).toEqual value: 'c', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee'] + expect(tokens[6]).toEqual value: ')', scopes: ['source.coffee', 'meta.brace.round.coffee'] + + {tokens} = grammar.tokenizeLine("[a.b c]") + expect(tokens[0]).toEqual value: "[", scopes: ["source.coffee", "punctuation.definition.array.begin.bracket.square.coffee"] + expect(tokens[1]).toEqual value: 'a', scopes: ['source.coffee', 'variable.other.object.coffee'] + expect(tokens[2]).toEqual value: '.', scopes: ['source.coffee', 'meta.method-call.coffee', 'punctuation.separator.method.period.coffee'] + expect(tokens[3]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] + expect(tokens[4]).toEqual value: ' ', scopes: 
['source.coffee', 'meta.method-call.coffee'] + expect(tokens[5]).toEqual value: 'c', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee'] + expect(tokens[6]).toEqual value: "]", scopes: ["source.coffee", "punctuation.definition.array.end.bracket.square.coffee"] + + {tokens} = grammar.tokenizeLine('a.b not c') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'variable.other.object.coffee'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'meta.method-call.coffee', 'punctuation.separator.method.period.coffee'] + expect(tokens[2]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] + expect(tokens[3]).toEqual value: ' ', scopes: ['source.coffee', 'meta.method-call.coffee'] + expect(tokens[4]).toEqual value: 'not', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'keyword.operator.logical.coffee'] + expect(tokens[5]).toEqual value: ' c', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee'] + + {tokens} = grammar.tokenizeLine('a.b 1+1') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'variable.other.object.coffee'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'meta.method-call.coffee', 'punctuation.separator.method.period.coffee'] + expect(tokens[2]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] + expect(tokens[3]).toEqual value: ' ', scopes: ['source.coffee', 'meta.method-call.coffee'] + expect(tokens[4]).toEqual value: '1', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'constant.numeric.decimal.coffee'] + expect(tokens[5]).toEqual value: '+', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'keyword.operator.coffee'] + expect(tokens[6]).toEqual value: '1', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'constant.numeric.decimal.coffee'] + + {tokens} = grammar.tokenizeLine('a.b @') + expect(tokens[2]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] + expect(tokens[4]).toEqual value: '@', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'variable.other.readwrite.instance.coffee'] + + {tokens} = grammar.tokenizeLine('a.$abc$ "q"') + expect(tokens[2]).toEqual value: '$abc$', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] + + {tokens} = grammar.tokenizeLine('a.$$ 4') + expect(tokens[2]).toEqual value: '$$', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] + + {tokens} = grammar.tokenizeLine('a.b @$') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.coffee', 'variable.other.object.coffee'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.coffee', 'meta.method-call.coffee', 'punctuation.separator.method.period.coffee'] + expect(tokens[2]).toEqual value: 'b', scopes: ['source.coffee', 'meta.method-call.coffee', 'entity.name.function.coffee'] + expect(tokens[3]).toEqual value: ' ', scopes: ['source.coffee', 'meta.method-call.coffee'] + expect(tokens[4]).toEqual value: '@$', scopes: ['source.coffee', 'meta.method-call.coffee', 'meta.arguments.coffee', 'variable.other.readwrite.instance.coffee'] + + describe "destructuring assignments", -> + it "tokenizes object and array destructuring", -> + {tokens} = grammar.tokenizeLine("{something} = hi") + expect(tokens[0]).toEqual value: "{", 
scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.begin.bracket.curly.coffee"] + expect(tokens[1]).toEqual value: "something", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "variable.assignment.coffee"] + expect(tokens[2]).toEqual value: "}", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.end.bracket.curly.coffee"] + expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[4]).toEqual value: "=", scopes: ["source.coffee", "keyword.operator.assignment.coffee"] + expect(tokens[5]).toEqual value: " hi", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine("[x, y] = browserWindow.getPosition()") + expect(tokens[0]).toEqual value: "[", scopes: ["source.coffee", "meta.variable.assignment.destructured.array.coffee", "punctuation.definition.destructuring.begin.bracket.square.coffee"] + expect(tokens[1]).toEqual value: "x", scopes: ["source.coffee", "meta.variable.assignment.destructured.array.coffee", "variable.assignment.coffee"] + expect(tokens[2]).toEqual value: ",", scopes: ["source.coffee", "meta.variable.assignment.destructured.array.coffee", "punctuation.separator.delimiter.coffee"] + expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee", "meta.variable.assignment.destructured.array.coffee"] + expect(tokens[4]).toEqual value: "y", scopes: ["source.coffee", "meta.variable.assignment.destructured.array.coffee", "variable.assignment.coffee"] + expect(tokens[5]).toEqual value: "]", scopes: ["source.coffee", "meta.variable.assignment.destructured.array.coffee", "punctuation.definition.destructuring.end.bracket.square.coffee"] + expect(tokens[6]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[7]).toEqual value: "=", scopes: ["source.coffee", "keyword.operator.assignment.coffee"] + expect(tokens[8]).toEqual value: " ", scopes: ["source.coffee"] + + {tokens} = grammar.tokenizeLine("{'} ='}") # Make sure this *isn't* tokenized as a destructuring assignment + expect(tokens[0]).not.toEqual value: "{", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.begin.bracket.curly.coffee"] + expect(tokens[0]).toEqual value: "{", scopes: ["source.coffee", "meta.brace.curly.coffee"] + + it "tokenizes nested destructuring assignments", -> + {tokens} = grammar.tokenizeLine("{poet: {name, address: [street, city]}} = futurists") + expect(tokens[0]).toEqual value: "{", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.begin.bracket.curly.coffee"] + expect(tokens[4]).toEqual value: "{", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.begin.bracket.curly.coffee"] + expect(tokens[11]).toEqual value: "[", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.array.coffee", "punctuation.definition.destructuring.begin.bracket.square.coffee"] + expect(tokens[16]).toEqual value: "]", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.array.coffee", 
"punctuation.definition.destructuring.end.bracket.square.coffee"] + expect(tokens[17]).toEqual value: "}", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.end.bracket.curly.coffee"] + expect(tokens[18]).toEqual value: "}", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.end.bracket.curly.coffee"] + expect(tokens[19]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[20]).toEqual value: "=", scopes: ["source.coffee", "keyword.operator.assignment.coffee"] + + it "tokenizes multiple nested destructuring assignments", -> + {tokens} = grammar.tokenizeLine("{start: {row: startRow}, end: {row: endRow}} = range") + expect(tokens[0]).toEqual value: "{", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.begin.bracket.curly.coffee"] + expect(tokens[4]).toEqual value: "{", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.begin.bracket.curly.coffee"] + expect(tokens[9]).toEqual value: "}", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.end.bracket.curly.coffee"] + expect(tokens[15]).toEqual value: "{", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.begin.bracket.curly.coffee"] + expect(tokens[20]).toEqual value: "}", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.end.bracket.curly.coffee"] + expect(tokens[21]).toEqual value: "}", scopes: ["source.coffee", "meta.variable.assignment.destructured.object.coffee", "punctuation.definition.destructuring.end.bracket.curly.coffee"] + expect(tokens[22]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[23]).toEqual value: "=", scopes: ["source.coffee", "keyword.operator.assignment.coffee"] + + it "doesn't tokenize nested brackets as destructuring assignments", -> + {tokens} = grammar.tokenizeLine("[Point(0, 1), [Point(0, 0), Point(0, 1)]]") + expect(tokens[0]).not.toEqual value: "[", scopes: ["source.coffee", "meta.variable.assignment.destructured.array.coffee", "punctuation.definition.destructuring.begin.bracket.square.coffee"] + expect(tokens[0]).toEqual value: "[", scopes: ["source.coffee", "punctuation.definition.array.begin.bracket.square.coffee"] + + it "tokenizes inline constant followed by unless statement correctly", -> + {tokens} = grammar.tokenizeLine("return 0 unless true") + expect(tokens[0]).toEqual value: "return", scopes: ["source.coffee", "keyword.control.coffee"] + expect(tokens[2]).toEqual value: "0", scopes: ["source.coffee", "constant.numeric.decimal.coffee"] + expect(tokens[4]).toEqual value: "unless", scopes: ["source.coffee", "keyword.control.coffee"] + expect(tokens[6]).toEqual value: "true", scopes: ["source.coffee", "constant.language.boolean.true.coffee"] + + describe "for loops", -> + it "tokenizes for-in loops", -> + {tokens} = grammar.tokenizeLine("for food in foods") + expect(tokens[0]).toEqual value: "for", scopes: ["source.coffee", 
"keyword.control.coffee"] + expect(tokens[1]).toEqual value: " food ", scopes: ["source.coffee"] + expect(tokens[2]).toEqual value: "in", scopes: ["source.coffee", "keyword.control.coffee"] + expect(tokens[3]).toEqual value: " foods", scopes: ["source.coffee"] + + it "tokenizes for-of loops", -> + {tokens} = grammar.tokenizeLine("for food, type of foods") + expect(tokens[0]).toEqual value: "for", scopes: ["source.coffee", "keyword.control.coffee"] + expect(tokens[1]).toEqual value: " food", scopes: ["source.coffee"] + expect(tokens[2]).toEqual value: ",", scopes: ["source.coffee", "punctuation.separator.delimiter.coffee"] + expect(tokens[3]).toEqual value: " type ", scopes: ["source.coffee"] + expect(tokens[4]).toEqual value: "of", scopes: ["source.coffee", "keyword.control.coffee"] + expect(tokens[5]).toEqual value: " foods", scopes: ["source.coffee"] + + it "tokenizes loops using arrays", -> + {tokens} = grammar.tokenizeLine("for food in ['broccoli', 'spinach', 'chocolate']") + expect(tokens[0]).toEqual value: "for", scopes: ["source.coffee", "keyword.control.coffee"] + expect(tokens[1]).toEqual value: " food ", scopes: ["source.coffee"] + expect(tokens[2]).toEqual value: "in", scopes: ["source.coffee", "keyword.control.coffee"] + expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[4]).toEqual value: "[", scopes: ["source.coffee", "punctuation.definition.array.begin.bracket.square.coffee"] + expect(tokens[18]).toEqual value: "]", scopes: ["source.coffee", "punctuation.definition.array.end.bracket.square.coffee"] + + it "tokenizes loops using the optional `when` keyword", -> + {tokens} = grammar.tokenizeLine("for food in foods when food isnt chocolate") + expect(tokens[0]).toEqual value: "for", scopes: ["source.coffee", "keyword.control.coffee"] + expect(tokens[3]).toEqual value: " foods ", scopes: ["source.coffee"] + expect(tokens[4]).toEqual value: "when", scopes: ["source.coffee", "keyword.control.coffee"] + expect(tokens[5]).toEqual value: " food ", scopes: ["source.coffee"] + expect(tokens[6]).toEqual value: "isnt", scopes: ["source.coffee", "keyword.operator.comparison.coffee"] + expect(tokens[7]).toEqual value: " chocolate", scopes: ["source.coffee"] + + it "tokenizes loops using the optional `by` keyword", -> + {tokens} = grammar.tokenizeLine("for food in foods by -1") + expect(tokens[0]).toEqual value: "for", scopes: ["source.coffee", "keyword.control.coffee"] + expect(tokens[3]).toEqual value: " foods ", scopes: ["source.coffee"] + expect(tokens[4]).toEqual value: "by", scopes: ["source.coffee", "keyword.control.coffee"] + expect(tokens[5]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[6]).toEqual value: "-", scopes: ["source.coffee", "keyword.operator.coffee"] + expect(tokens[7]).toEqual value: "1", scopes: ["source.coffee", "constant.numeric.decimal.coffee"] + + describe "regular expressions", -> + beforeEach -> + waitsForPromise -> + atom.packages.activatePackage("language-javascript") # Provides the regexp subgrammar + + it "tokenizes regular expressions", -> + {tokens} = grammar.tokenizeLine("/test/") + expect(tokens[0]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] + expect(tokens[1]).toEqual value: "test", scopes: ["source.coffee", "string.regexp.coffee"] + expect(tokens[2]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] + + {tokens} = grammar.tokenizeLine("/{'}/") + expect(tokens[0]).toEqual 
value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] + expect(tokens[2]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] + + {tokens} = grammar.tokenizeLine("foo + /test/") + expect(tokens[0]).toEqual value: "foo ", scopes: ["source.coffee"] + expect(tokens[1]).toEqual value: "+", scopes: ["source.coffee", "keyword.operator.coffee"] + expect(tokens[2]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[3]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] + expect(tokens[4]).toEqual value: "test", scopes: ["source.coffee", "string.regexp.coffee"] + expect(tokens[5]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] + + it "tokenizes regular expressions containing spaces", -> + {tokens} = grammar.tokenizeLine("/ te st /") + expect(tokens[0]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] + expect(tokens[1]).toEqual value: " te st ", scopes: ["source.coffee", "string.regexp.coffee"] + expect(tokens[2]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] + + it "tokenizes regular expressions containing escaped forward slashes", -> + {tokens} = grammar.tokenizeLine("/test\\//") + expect(tokens[0]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] + expect(tokens[1]).toEqual value: "test", scopes: ["source.coffee", "string.regexp.coffee"] + expect(tokens[2]).toEqual value: "\\/", scopes: ["source.coffee", "string.regexp.coffee", "constant.character.escape.backslash.regexp"] + expect(tokens[3]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] + + {tokens} = grammar.tokenizeLine("/one\\/two!\\/three/") + expect(tokens[0]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] + expect(tokens[1]).toEqual value: "one", scopes: ["source.coffee", "string.regexp.coffee"] + expect(tokens[2]).toEqual value: "\\/", scopes: ["source.coffee", "string.regexp.coffee", "constant.character.escape.backslash.regexp"] + expect(tokens[3]).toEqual value: "two!", scopes: ["source.coffee", "string.regexp.coffee"] + expect(tokens[4]).toEqual value: "\\/", scopes: ["source.coffee", "string.regexp.coffee", "constant.character.escape.backslash.regexp"] + expect(tokens[5]).toEqual value: "three", scopes: ["source.coffee", "string.regexp.coffee"] + expect(tokens[6]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] + + it "tokenizes regular expressions inside arrays", -> + {tokens} = grammar.tokenizeLine("[/test/]") + expect(tokens[0]).toEqual value: "[", scopes: ["source.coffee", "punctuation.definition.array.begin.bracket.square.coffee"] + expect(tokens[1]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] + expect(tokens[2]).toEqual value: "test", scopes: ["source.coffee", "string.regexp.coffee"] + expect(tokens[3]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] + expect(tokens[4]).toEqual value: "]", scopes: ["source.coffee", 
"punctuation.definition.array.end.bracket.square.coffee"] + + {tokens} = grammar.tokenizeLine("[1, /test/]") + expect(tokens[0]).toEqual value: "[", scopes: ["source.coffee", "punctuation.definition.array.begin.bracket.square.coffee"] + expect(tokens[1]).toEqual value: "1", scopes: ["source.coffee", "constant.numeric.decimal.coffee"] + expect(tokens[2]).toEqual value: ",", scopes: ["source.coffee", "punctuation.separator.delimiter.coffee"] + expect(tokens[3]).toEqual value: " ", scopes: ["source.coffee"] + expect(tokens[4]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] + expect(tokens[5]).toEqual value: "test", scopes: ["source.coffee", "string.regexp.coffee"] + expect(tokens[6]).toEqual value: "/", scopes: ["source.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] + expect(tokens[7]).toEqual value: "]", scopes: ["source.coffee", "punctuation.definition.array.end.bracket.square.coffee"] + + it "does not tokenize multiple division as regex", -> + # https://github.com/atom/language-coffee-script/issues/112 + {tokens} = grammar.tokenizeLine("a / b + c / d") + expect(tokens[1]).toEqual value: "/", scopes: ["source.coffee", "keyword.operator.coffee"] + expect(tokens[2]).toEqual value: " b ", scopes: ["source.coffee"] + expect(tokens[5]).toEqual value: "/", scopes: ["source.coffee", "keyword.operator.coffee"] + + {tokens} = grammar.tokenizeLine("a / 2 / (3)") + expect(tokens[1]).toEqual value: "/", scopes: ["source.coffee", "keyword.operator.coffee"] + expect(tokens[3]).toEqual value: "2", scopes: ["source.coffee", "constant.numeric.decimal.coffee"] + expect(tokens[5]).toEqual value: "/", scopes: ["source.coffee", "keyword.operator.coffee"] + + it "does not tokenize comments with URLs in them as regex", -> + # Disclaimer: This does not fix when comments contain only slashes, such as `a / something # comment /` + {tokens} = grammar.tokenizeLine("canvas.width/2 # https://github.com/atom/language-coffee-script/issues/112") + expect(tokens[3]).toEqual value: "/", scopes: ["source.coffee", "keyword.operator.coffee"] + expect(tokens[6]).toEqual value: "#", scopes: ["source.coffee", "comment.line.number-sign.coffee", "punctuation.definition.comment.coffee"] + expect(tokens[7]).toEqual value: " https://github.com/atom/language-coffee-script/issues/112", scopes: ["source.coffee", "comment.line.number-sign.coffee"] + + it "stops tokenizing regex at the first non-escaped forwards slash", -> + {tokens} = grammar.tokenizeLine("path.replace(/\\\\/g, '/')") + expect(tokens[4]).toEqual value: "/", scopes: ["source.coffee", "meta.method-call.coffee", "meta.arguments.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] + expect(tokens[6]).toEqual value: "/", scopes: ["source.coffee", "meta.method-call.coffee", "meta.arguments.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] + expect(tokens[11]).toEqual value: "/", scopes: ["source.coffee", "meta.method-call.coffee", "meta.arguments.coffee", "string.quoted.single.coffee"] + + {tokens} = grammar.tokenizeLine("path.replace(/\\\\\\//g, '/')") + expect(tokens[4]).toEqual value: "/", scopes: ["source.coffee", "meta.method-call.coffee", "meta.arguments.coffee", "string.regexp.coffee", "punctuation.definition.string.begin.coffee"] + expect(tokens[6]).toEqual value: "\\/", scopes: ["source.coffee", "meta.method-call.coffee", "meta.arguments.coffee", "string.regexp.coffee", "constant.character.escape.backslash.regexp"] + 
expect(tokens[7]).toEqual value: "/", scopes: ["source.coffee", "meta.method-call.coffee", "meta.arguments.coffee", "string.regexp.coffee", "punctuation.definition.string.end.coffee"] + expect(tokens[12]).toEqual value: "/", scopes: ["source.coffee", "meta.method-call.coffee", "meta.arguments.coffee", "string.quoted.single.coffee"] + + it "tokenises multi-line regular expressions", -> + {tokens} = grammar.tokenizeLine('/// (XYZ) ///') + expect(tokens[0]).toEqual value: '///', scopes: ['source.coffee', 'string.regexp.multiline.coffee', 'punctuation.definition.string.begin.coffee'] + expect(tokens[2]).toEqual value: '(', scopes: ['source.coffee', 'string.regexp.multiline.coffee', 'meta.group.regexp', 'punctuation.definition.group.regexp'] + expect(tokens[3]).toEqual value: 'XYZ', scopes: ['source.coffee', 'string.regexp.multiline.coffee', 'meta.group.regexp'] + expect(tokens[4]).toEqual value: ')', scopes: ['source.coffee', 'string.regexp.multiline.coffee', 'meta.group.regexp', 'punctuation.definition.group.regexp'] + expect(tokens[6]).toEqual value: '///', scopes: ['source.coffee', 'string.regexp.multiline.coffee', 'punctuation.definition.string.end.coffee'] + + lines = grammar.tokenizeLines """ + /// + XYZ // + /~/ + /// + """ + expect(lines[0][0]).toEqual value: '///', scopes: ['source.coffee', 'string.regexp.multiline.coffee', 'punctuation.definition.string.begin.coffee'] + expect(lines[1][0]).toEqual value: 'XYZ //', scopes: ['source.coffee', 'string.regexp.multiline.coffee'] + expect(lines[2][0]).toEqual value: '/~/', scopes: ['source.coffee', 'string.regexp.multiline.coffee'] + expect(lines[3][0]).toEqual value: '///', scopes: ['source.coffee', 'string.regexp.multiline.coffee', 'punctuation.definition.string.end.coffee'] + + describe "here-docs", -> + it "tokenises single-quoted here-docs", -> + {tokens} = grammar.tokenizeLine "'''XYZ'''" + expect(tokens[0]).toEqual value: "'''", scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee', 'punctuation.definition.string.begin.coffee'] + expect(tokens[1]).toEqual value: 'XYZ', scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee'] + expect(tokens[2]).toEqual value: "'''", scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee', 'punctuation.definition.string.end.coffee'] + + lines = grammar.tokenizeLines """ + ''' + 'ABC' + XYZ '' + ''' + """ + expect(lines[0][0]).toEqual value: "'''", scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee', 'punctuation.definition.string.begin.coffee'] + expect(lines[1][0]).toEqual value: "'ABC'", scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee'] + expect(lines[2][0]).toEqual value: "XYZ ''", scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee'] + expect(lines[3][0]).toEqual value: "'''", scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee', 'punctuation.definition.string.end.coffee'] + + it "tokenises double-quoted here-docs", -> + {tokens} = grammar.tokenizeLine "'''XYZ'''" + expect(tokens[0]).toEqual value: "'''", scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee', 'punctuation.definition.string.begin.coffee'] + expect(tokens[1]).toEqual value: 'XYZ', scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee'] + expect(tokens[2]).toEqual value: "'''", scopes: ['source.coffee', 'string.quoted.single.heredoc.coffee', 'punctuation.definition.string.end.coffee'] + + lines = grammar.tokenizeLines ''' + """ + "ABC" + XYZ "" + """ + ''' + expect(lines[0][0]).toEqual value: '"""', scopes: ['source.coffee', 
'string.quoted.double.heredoc.coffee', 'punctuation.definition.string.begin.coffee'] + expect(lines[1][0]).toEqual value: '"ABC"', scopes: ['source.coffee', 'string.quoted.double.heredoc.coffee'] + expect(lines[2][0]).toEqual value: 'XYZ ""', scopes: ['source.coffee', 'string.quoted.double.heredoc.coffee'] + expect(lines[3][0]).toEqual value: '"""', scopes: ['source.coffee', 'string.quoted.double.heredoc.coffee', 'punctuation.definition.string.end.coffee'] + + describe "escape sequences in strings", -> + it "tokenises leading backslashes in double-quoted strings", -> + {tokens} = grammar.tokenizeLine('"a\\\\b\\\\\\\\c"') + expect(tokens[0]).toEqual value: '"', scopes: ['source.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.begin.coffee'] + expect(tokens[1]).toEqual value: 'a', scopes: ['source.coffee', 'string.quoted.double.coffee'] + expect(tokens[2]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee'] + expect(tokens[3]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee'] + expect(tokens[4]).toEqual value: 'b', scopes: ['source.coffee', 'string.quoted.double.coffee'] + expect(tokens[5]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee'] + expect(tokens[6]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee'] + expect(tokens[7]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee'] + expect(tokens[8]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee'] + expect(tokens[9]).toEqual value: 'c', scopes: ['source.coffee', 'string.quoted.double.coffee'] + expect(tokens[10]).toEqual value: '"', scopes: ['source.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.end.coffee'] + + {tokens} = grammar.tokenizeLine('"\\a\\t\\a\\b"') + expect(tokens[0]).toEqual value: '"', scopes: ['source.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.begin.coffee'] + expect(tokens[1]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee'] + expect(tokens[2]).toEqual value: 'a', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee'] + expect(tokens[3]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee'] + expect(tokens[4]).toEqual value: 't', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee'] + expect(tokens[5]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee'] + expect(tokens[6]).toEqual value: 'a', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee'] + expect(tokens[7]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.double.coffee', 
'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee'] + expect(tokens[8]).toEqual value: 'b', scopes: ['source.coffee', 'string.quoted.double.coffee', 'constant.character.escape.backslash.coffee'] + expect(tokens[9]).toEqual value: '"', scopes: ['source.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.end.coffee'] + + it "tokenises leading backslashes in single-quoted strings", -> + {tokens} = grammar.tokenizeLine("'a\\\\b\\\\\\\\c'") + expect(tokens[0]).toEqual value: "'", scopes: ['source.coffee', 'string.quoted.single.coffee', 'punctuation.definition.string.begin.coffee'] + expect(tokens[1]).toEqual value: 'a', scopes: ['source.coffee', 'string.quoted.single.coffee'] + expect(tokens[2]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee'] + expect(tokens[3]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee'] + expect(tokens[4]).toEqual value: 'b', scopes: ['source.coffee', 'string.quoted.single.coffee'] + expect(tokens[5]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee'] + expect(tokens[6]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee'] + expect(tokens[7]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee'] + expect(tokens[8]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee'] + expect(tokens[9]).toEqual value: 'c', scopes: ['source.coffee', 'string.quoted.single.coffee'] + expect(tokens[10]).toEqual value: "'", scopes: ['source.coffee', 'string.quoted.single.coffee', 'punctuation.definition.string.end.coffee'] + + {tokens} = grammar.tokenizeLine("'\\a\\t\\a\\b'") + expect(tokens[0]).toEqual value: "'", scopes: ['source.coffee', 'string.quoted.single.coffee', 'punctuation.definition.string.begin.coffee'] + expect(tokens[1]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee'] + expect(tokens[2]).toEqual value: 'a', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee'] + expect(tokens[3]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee'] + expect(tokens[4]).toEqual value: 't', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee'] + expect(tokens[5]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee'] + expect(tokens[6]).toEqual value: 'a', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee'] + expect(tokens[7]).toEqual value: '\\', scopes: ['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee', 'punctuation.definition.escape.backslash.coffee'] + expect(tokens[8]).toEqual value: 'b', scopes: 
['source.coffee', 'string.quoted.single.coffee', 'constant.character.escape.backslash.coffee'] + expect(tokens[9]).toEqual value: "'", scopes: ['source.coffee', 'string.quoted.single.coffee', 'punctuation.definition.string.end.coffee'] + + describe "jsx", -> + it "tokenises HTML tags", -> + {tokens} = grammar.tokenizeLine("<div></div>") + expect(tokens[0]).toEqual value: '<', scopes: ['source.coffee', 'meta.tag.coffee', 'punctuation.definition.tag.coffee'] + expect(tokens[1]).toEqual value: 'div', scopes: ['source.coffee', 'meta.tag.coffee', 'entity.name.tag.coffee' ] + expect(tokens[2]).toEqual value: '>', scopes: ['source.coffee', 'meta.tag.coffee' ] + expect(tokens[3]).toEqual value: '</', scopes: ['source.coffee', 'meta.tag.coffee', 'punctuation.definition.tag.coffee' ] + expect(tokens[4]).toEqual value: 'div', scopes: ['source.coffee', 'meta.tag.coffee', 'entity.name.tag.coffee' ] + expect(tokens[5]).toEqual value: '>', scopes: ['source.coffee', 'meta.tag.coffee' ] + + {tokens} = grammar.tokenizeLine("<div/>") + expect(tokens[0]).toEqual value: '<', scopes: ['source.coffee', 'meta.tag.coffee', 'punctuation.definition.tag.coffee'] + expect(tokens[1]).toEqual value: 'div', scopes: ['source.coffee', 'meta.tag.coffee', 'entity.name.tag.coffee' ] + expect(tokens[2]).toEqual value: '/>', scopes: ['source.coffee', 'meta.tag.coffee' ] + + it "tokenises HTML tags with attributes", -> + {tokens} = grammar.tokenizeLine("<div class='myclass' id=\"myid\">") + expect(tokens[0]).toEqual value: '<', scopes: ['source.coffee', 'meta.tag.coffee', 'punctuation.definition.tag.coffee'] + expect(tokens[1]).toEqual value: 'div', scopes: ['source.coffee', 'meta.tag.coffee', 'entity.name.tag.coffee' ] + expect(tokens[2]).toEqual value: ' ', scopes: ['source.coffee', 'meta.tag.coffee' ] + expect(tokens[3]).toEqual value: 'class', scopes: ['source.coffee', 'meta.tag.coffee', 'entity.other.attribute-name.coffee' ] + expect(tokens[4]).toEqual value: '=', scopes: ['source.coffee', 'meta.tag.coffee', 'keyword.operator.assignment.coffee' ] + expect(tokens[5]).toEqual value: '\'', scopes: ['source.coffee', 'meta.tag.coffee', 'string.quoted.single.coffee', 'punctuation.definition.string.begin.coffee' ] + expect(tokens[6]).toEqual value: 'myclass', scopes: ['source.coffee', 'meta.tag.coffee', 'string.quoted.single.coffee' ] + expect(tokens[7]).toEqual value: '\'', scopes: ['source.coffee', 'meta.tag.coffee', 'string.quoted.single.coffee', 'punctuation.definition.string.end.coffee' ] + expect(tokens[8]).toEqual value: ' ', scopes: ['source.coffee', 'meta.tag.coffee' ] + expect(tokens[9]).toEqual value: 'id', scopes: ['source.coffee', 'meta.tag.coffee', 'entity.other.attribute-name.coffee' ] + expect(tokens[10]).toEqual value: '=', scopes: ['source.coffee', 'meta.tag.coffee', 'keyword.operator.assignment.coffee' ] + expect(tokens[11]).toEqual value: '"', scopes: ['source.coffee', 'meta.tag.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.begin.coffee' ] + expect(tokens[12]).toEqual value: 'myid', scopes: ['source.coffee', 'meta.tag.coffee', 'string.quoted.double.coffee' ] + expect(tokens[13]).toEqual value: '"', scopes: ['source.coffee', 'meta.tag.coffee', 'string.quoted.double.coffee', 'punctuation.definition.string.end.coffee' ] + expect(tokens[14]).toEqual value: '>', scopes: ['source.coffee', 'meta.tag.coffee' ] + + it "tokenises HTML tags with attributes that have expressions", -> + {tokens} = grammar.tokenizeLine("<div on-click={(e)->@handleClick(e)}>") + expect(tokens[0]).toEqual value: '<', 
scopes: ['source.coffee', 'meta.tag.coffee', 'punctuation.definition.tag.coffee'] + expect(tokens[1]).toEqual value: 'div', scopes: ['source.coffee', 'meta.tag.coffee', 'entity.name.tag.coffee' ] + expect(tokens[2]).toEqual value: ' ', scopes: ['source.coffee', 'meta.tag.coffee' ] + expect(tokens[3]).toEqual value: 'on-click', scopes: ['source.coffee', 'meta.tag.coffee', 'entity.other.attribute-name.coffee' ] + expect(tokens[4]).toEqual value: '=', scopes: ['source.coffee', 'meta.tag.coffee', 'keyword.operator.assignment.coffee' ] + expect(tokens[5]).toEqual value: '{', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.brace.curly.coffee'] + expect(tokens[6]).toEqual value: '(', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function.inline.coffee', 'meta.parameters.coffee', 'punctuation.definition.parameters.begin.bracket.round.coffee' ] + expect(tokens[7]).toEqual value: 'e', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function.inline.coffee', 'meta.parameters.coffee', 'variable.parameter.function.coffee' ] + expect(tokens[8]).toEqual value: ')', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function.inline.coffee', 'meta.parameters.coffee', 'punctuation.definition.parameters.end.bracket.round.coffee' ] + expect(tokens[9]).toEqual value: '->', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function.inline.coffee', 'storage.type.function.coffee' ] + expect(tokens[10]).toEqual value: '@', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function-call.coffee', 'variable.other.readwrite.instance.coffee' ] + expect(tokens[11]).toEqual value: 'handleClick', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function-call.coffee', 'entity.name.function.coffee' ] + expect(tokens[12]).toEqual value: '(', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.begin.bracket.round.coffee' ] + expect(tokens[13]).toEqual value: 'e', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee' ] + expect(tokens[14]).toEqual value: ')', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.function-call.coffee', 'meta.arguments.coffee', 'punctuation.definition.arguments.end.bracket.round.coffee' ] + expect(tokens[15]).toEqual value: '}', scopes: ['source.coffee', 'meta.tag.coffee', 'meta.brace.curly.coffee'] + expect(tokens[16]).toEqual value: '>', scopes: ['source.coffee', 'meta.tag.coffee'] + + describe "firstLineMatch", -> + it "recognises interpreter directives", -> + valid = """ + #!/usr/sbin/coffee foo + #!/usr/bin/coffee foo=bar/ + #!/usr/sbin/coffee + #!/usr/sbin/coffee foo bar baz + #!/usr/bin/coffee perl + #!/usr/bin/coffee bin/perl + #!/usr/bin/coffee + #!/bin/coffee + #!/usr/bin/coffee --script=usr/bin + #! /usr/bin/env A=003 B=149 C=150 D=xzd E=base64 F=tar G=gz H=head I=tail coffee + #!\t/usr/bin/env --foo=bar coffee --quu=quux + #! /usr/bin/coffee + #!/usr/bin/env coffee + """ + for line in valid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull() + + invalid = """ + \x20#!/usr/sbin/coffee + \t#!/usr/sbin/coffee + #!/usr/bin/env-coffee/node-env/ + #!/usr/bin/env-coffee + #! 
/usr/bincoffee + #!\t/usr/bin/env --coffee=bar + """ + for line in invalid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull() + + it "recognises Emacs modelines", -> + valid = """ + #-*- coffee -*- + #-*- mode: Coffee -*- + /* -*-coffee-*- */ + // -*- Coffee -*- + /* -*- mode:Coffee -*- */ + // -*- font:bar;mode:Coffee -*- + // -*- font:bar;mode:Coffee;foo:bar; -*- + // -*-font:mode;mode:COFFEE-*- + // -*- foo:bar mode: coffee bar:baz -*- + " -*-foo:bar;mode:cOFFEE;bar:foo-*- "; + " -*-font-mode:foo;mode:coFFeE;foo-bar:quux-*-" + "-*-font:x;foo:bar; mode : Coffee; bar:foo;foooooo:baaaaar;fo:ba;-*-"; + "-*- font:x;foo : bar ; mode : Coffee ; bar : foo ; foooooo:baaaaar;fo:ba-*-"; + """ + for line in valid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull() + + invalid = """ + /* --*coffee-*- */ + /* -*-- coffee -*- + /* -*- -- Coffee -*- + /* -*- Coffee -;- -*- + // -*- freeCoffee -*- + // -*- Coffee; -*- + // -*- coffee-sugar -*- + /* -*- model:coffee -*- + /* -*- indent-mode:coffee -*- + // -*- font:mode;Coffee -*- + // -*- mode: -*- Coffee + // -*- mode: jfc-give-me-coffee -*- + // -*-font:mode;mode:coffee--*- + """ + for line in invalid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull() + + it "recognises Vim modelines", -> + valid = """ + vim: se filetype=coffee: + # vim: se ft=coffee: + # vim: set ft=COFFEE: + # vim: set filetype=CoffEE: + # vim: ft=CoffEE + # vim: syntax=CoffEE + # vim: se syntax=CoffEE: + # ex: syntax=CoffEE + # vim:ft=coffee + # vim600: ft=coffee + # vim>600: set ft=coffee: + # vi:noai:sw=3 ts=6 ft=coffee + # vi::::::::::noai:::::::::::: ft=COFFEE + # vim:ts=4:sts=4:sw=4:noexpandtab:ft=cOfFeE + # vi:: noai : : : : sw =3 ts =6 ft =coFFEE + # vim: ts=4: pi sts=4: ft=cofFeE: noexpandtab: sw=4: + # vim: ts=4 sts=4: ft=coffee noexpandtab: + # vim:noexpandtab sts=4 ft=coffEE ts=4 + # vim:noexpandtab:ft=cOFFEe + # vim:ts=4:sts=4 ft=cofFeE:noexpandtab:\x20 + # vim:noexpandtab titlestring=hi\|there\\\\ ft=cOFFEe ts=4 + """ + for line in valid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull() + + invalid = """ + ex: se filetype=coffee: + _vi: se filetype=coffee: + vi: se filetype=coffee + # vim set ft=coffee + # vim: soft=coffee + # vim: clean-syntax=coffee: + # vim set ft=coffee: + # vim: setft=coffee: + # vim: se ft=coffee backupdir=tmp + # vim: set ft=coffee set cmdheight=1 + # vim:noexpandtab sts:4 ft:coffee ts:4 + # vim:noexpandtab titlestring=hi\\|there\\ ft=coffee ts=4 + # vim:noexpandtab titlestring=hi\\|there\\\\\\ ft=coffee ts=4 + """ + for line in invalid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull() diff --git a/packages/language-csharp/.coffeelintignore b/packages/language-csharp/.coffeelintignore new file mode 100644 index 000000000..1db51fed7 --- /dev/null +++ b/packages/language-csharp/.coffeelintignore @@ -0,0 +1 @@ +spec/fixtures diff --git a/packages/language-csharp/.github/no-response.yml b/packages/language-csharp/.github/no-response.yml new file mode 100644 index 000000000..1c8799d13 --- /dev/null +++ b/packages/language-csharp/.github/no-response.yml @@ -0,0 +1,15 @@ +# Configuration for probot-no-response - https://github.com/probot/no-response + +# Number of days of inactivity before an issue is closed for lack of response +daysUntilClose: 28 + +# Label requiring a response +responseRequiredLabel: more-information-needed + +# Comment to post when closing an 
issue for lack of response. Set to `false` to disable. +closeComment: > + This issue has been automatically closed because there has been no response + to our request for more information from the original author. With only the + information that is currently in the issue, we don't have enough information + to take action. Please reach out if you have or find the answers we need so + that we can investigate further. diff --git a/packages/language-csharp/.github/workflows/ci.yml b/packages/language-csharp/.github/workflows/ci.yml new file mode 100644 index 000000000..ab77c1f1f --- /dev/null +++ b/packages/language-csharp/.github/workflows/ci.yml @@ -0,0 +1,23 @@ +name: CI + +on: [push] + +env: + CI: true + +jobs: + Test: + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + channel: [stable, beta] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v1 + - uses: UziTech/action-setup-atom@v2 + with: + version: ${{ matrix.channel }} + - name: Install dependencies + run: apm install + - name: Run tests + run: atom --test spec diff --git a/packages/language-csharp/.gitignore b/packages/language-csharp/.gitignore new file mode 100644 index 000000000..3c3629e64 --- /dev/null +++ b/packages/language-csharp/.gitignore @@ -0,0 +1 @@ +node_modules diff --git a/packages/language-csharp/ISSUE_TEMPLATE.md b/packages/language-csharp/ISSUE_TEMPLATE.md new file mode 100644 index 000000000..b60bb86c9 --- /dev/null +++ b/packages/language-csharp/ISSUE_TEMPLATE.md @@ -0,0 +1,40 @@ +<!-- + +Have you read Atom's Code of Conduct? By filing an Issue, you are expected to comply with it, including treating everyone with respect: https://github.com/atom/atom/blob/master/CODE_OF_CONDUCT.md + +Do you want to ask a question? Are you looking for support? The Atom message board is the best place for getting support: https://discuss.atom.io + +--> + +### Prerequisites + +* [ ] Put an X between the brackets on this line if you have done all of the following: + * Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode + * Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/ + * Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq + * Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom + * Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages + +### Description + +[Description of the issue] + +### Steps to Reproduce + +1. [First Step] +2. [Second Step] +3. [and so on...] + +**Expected behavior:** [What you expect to happen] + +**Actual behavior:** [What actually happens] + +**Reproduces how often:** [What percentage of the time does it reproduce?] + +### Versions + +You can get this information from copy and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running. + +### Additional Information + +Any additional information, configuration or data that might be necessary to reproduce the issue. diff --git a/packages/language-csharp/LICENSE.md b/packages/language-csharp/LICENSE.md new file mode 100644 index 000000000..0d94e407b --- /dev/null +++ b/packages/language-csharp/LICENSE.md @@ -0,0 +1,26 @@ +MIT License + +Copyright (c) 2016 .NET Foundation, GitHub Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +-------------------------------------------------------------------- + +This package uses the +https://github.com/dotnet/csharp-tmLanguage from the .NET Foundation diff --git a/packages/language-csharp/PULL_REQUEST_TEMPLATE.md b/packages/language-csharp/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000..cdaa94a86 --- /dev/null +++ b/packages/language-csharp/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,28 @@ +### Requirements + +* Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion. +* All new code requires tests to ensure against regressions + +### Description of the Change + +<!-- + +We must be able to understand the design of your change from this description. If we can't get a good idea of what the code will be doing from the description here, the pull request may be closed at the maintainers' discretion. Keep in mind that the maintainer reviewing this PR may not be familiar with or have worked with the code here recently, so please walk us through the concepts. + +--> + +### Alternate Designs + +<!-- Explain what other alternates were considered and why the proposed version was selected --> + +### Benefits + +<!-- What benefits will be realized by the code change? --> + +### Possible Drawbacks + +<!-- What are the possible side-effects or negative impacts of the code change? --> + +### Applicable Issues + +<!-- Enter any applicable Issues here --> diff --git a/packages/language-csharp/README.md b/packages/language-csharp/README.md new file mode 100644 index 000000000..2b4f991a9 --- /dev/null +++ b/packages/language-csharp/README.md @@ -0,0 +1,9 @@ +# C# language support in Atom +[![macOS Build Status](https://travis-ci.org/atom/language-csharp.svg?branch=master)](https://travis-ci.org/atom/language-csharp) +[![Windows Build Status](https://ci.appveyor.com/api/projects/status/j1as3753y5t90obn/branch/master?svg=true)](https://ci.appveyor.com/project/Atom/language-csharp/branch/master) [![Dependency Status](https://david-dm.org/atom/language-csharp.svg)](https://david-dm.org/atom/language-csharp) + +Adds syntax highlighting and snippets to C# files in Atom. + +The C# grammar comes from the [.NET Foundation's C# grammar](https://github.com/dotnet/csharp-tmLanguage) + +Contributions and issues with the grammar should be raised upstream. 
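As an illustrative sketch (not part of this patch), the vendored C# grammar can be smoke-tested in the same Jasmine style as the CoffeeScript spec above. The describe block and assertions below are hypothetical; they assume only what the patch itself provides: the package activates as `language-csharp`, its grammar registers the `source.cs` scope, and the using-directive rule in `grammars/csharp.cson` names the keyword scope `keyword.other.using.cs`.

  describe "C# grammar", ->
    grammar = null

    beforeEach ->
      waitsForPromise ->
        # Activate the package added by this patch (hypothetical spec, name taken from packages/language-csharp)
        atom.packages.activatePackage("language-csharp")
      runs ->
        # Look up the grammar by the scopeName declared in grammars/csharp.cson
        grammar = atom.grammars.grammarForScopeName("source.cs")

    it "tokenizes using directives", ->
      {tokens} = grammar.tokenizeLine("using System;")
      # "using" is captured as keyword.other.using.cs by the using-directive rule
      expect(tokens[0]).toEqual value: "using", scopes: ["source.cs", "keyword.other.using.cs"]

The Cake grammar added in the following hunk could be exercised the same way through the `source.cake` scope it declares.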
diff --git a/packages/language-csharp/grammars/cake.cson b/packages/language-csharp/grammars/cake.cson new file mode 100644 index 000000000..69a6feb44 --- /dev/null +++ b/packages/language-csharp/grammars/cake.cson @@ -0,0 +1,14 @@ +scopeName: "source.cake" +name: "C# Cake File" +fileTypes: [ + "cake" +] +patterns: [ + { + include: "source.cs" + } + { + match: "^#(load|l)" + name: "preprocessor.source.cake" + } +] diff --git a/packages/language-csharp/grammars/csharp.cson b/packages/language-csharp/grammars/csharp.cson new file mode 100644 index 000000000..aceca5858 --- /dev/null +++ b/packages/language-csharp/grammars/csharp.cson @@ -0,0 +1,4195 @@ +name: "C#" +scopeName: "source.cs" +fileTypes: [ + "cs" +] +uuid: "f7de61e2-bdde-4e2a-a139-8221b179584e" +patterns: [ + { + include: "#preprocessor" + } + { + include: "#comment" + } + { + include: "#directives" + } + { + include: "#declarations" + } + { + include: "#script-top-level" + } +] +repository: + directives: + patterns: [ + { + include: "#extern-alias-directive" + } + { + include: "#using-directive" + } + { + include: "#attribute-section" + } + { + include: "#punctuation-semicolon" + } + ] + declarations: + patterns: [ + { + include: "#namespace-declaration" + } + { + include: "#type-declarations" + } + { + include: "#punctuation-semicolon" + } + ] + "script-top-level": + patterns: [ + { + include: "#method-declaration" + } + { + include: "#statement" + } + { + include: "#punctuation-semicolon" + } + ] + "type-declarations": + patterns: [ + { + include: "#preprocessor" + } + { + include: "#comment" + } + { + include: "#storage-modifier" + } + { + include: "#class-declaration" + } + { + include: "#delegate-declaration" + } + { + include: "#enum-declaration" + } + { + include: "#interface-declaration" + } + { + include: "#struct-declaration" + } + { + include: "#attribute-section" + } + { + include: "#punctuation-semicolon" + } + ] + "class-or-struct-members": + patterns: [ + { + include: "#preprocessor" + } + { + include: "#comment" + } + { + include: "#storage-modifier" + } + { + include: "#type-declarations" + } + { + include: "#property-declaration" + } + { + include: "#field-declaration" + } + { + include: "#event-declaration" + } + { + include: "#indexer-declaration" + } + { + include: "#variable-initializer" + } + { + include: "#constructor-declaration" + } + { + include: "#destructor-declaration" + } + { + include: "#operator-declaration" + } + { + include: "#conversion-operator-declaration" + } + { + include: "#method-declaration" + } + { + include: "#attribute-section" + } + { + include: "#punctuation-semicolon" + } + ] + "interface-members": + patterns: [ + { + include: "#preprocessor" + } + { + include: "#comment" + } + { + include: "#property-declaration" + } + { + include: "#event-declaration" + } + { + include: "#indexer-declaration" + } + { + include: "#method-declaration" + } + { + include: "#attribute-section" + } + { + include: "#punctuation-semicolon" + } + ] + statement: + patterns: [ + { + include: "#preprocessor" + } + { + include: "#comment" + } + { + include: "#while-statement" + } + { + include: "#do-statement" + } + { + include: "#for-statement" + } + { + include: "#foreach-statement" + } + { + include: "#if-statement" + } + { + include: "#else-part" + } + { + include: "#switch-statement" + } + { + include: "#goto-statement" + } + { + include: "#return-statement" + } + { + include: "#break-or-continue-statement" + } + { + include: "#throw-statement" + } + { + include: "#yield-statement" + } + { + include: 
"#await-statement" + } + { + include: "#try-statement" + } + { + include: "#checked-unchecked-statement" + } + { + include: "#lock-statement" + } + { + include: "#using-statement" + } + { + include: "#labeled-statement" + } + { + include: "#local-declaration" + } + { + include: "#block" + } + { + include: "#expression" + } + { + include: "#punctuation-semicolon" + } + ] + expression: + patterns: [ + { + include: "#preprocessor" + } + { + include: "#comment" + } + { + include: "#checked-unchecked-expression" + } + { + include: "#typeof-or-default-expression" + } + { + include: "#nameof-expression" + } + { + include: "#throw-expression" + } + { + include: "#interpolated-string" + } + { + include: "#verbatim-interpolated-string" + } + { + include: "#this-or-base-expression" + } + { + include: "#conditional-operator" + } + { + include: "#expression-operators" + } + { + include: "#await-expression" + } + { + include: "#query-expression" + } + { + include: "#as-expression" + } + { + include: "#is-expression" + } + { + include: "#anonymous-method-expression" + } + { + include: "#object-creation-expression" + } + { + include: "#array-creation-expression" + } + { + include: "#anonymous-object-creation-expression" + } + { + include: "#invocation-expression" + } + { + include: "#member-access-expression" + } + { + include: "#element-access-expression" + } + { + include: "#cast-expression" + } + { + include: "#literal" + } + { + include: "#parenthesized-expression" + } + { + include: "#tuple-deconstruction-assignment" + } + { + include: "#initializer-expression" + } + { + include: "#identifier" + } + ] + "extern-alias-directive": + begin: "\\s*(extern)\\b\\s*(alias)\\b\\s*(@?[_[:alpha:]][_[:alnum:]]*)" + beginCaptures: + "1": + name: "keyword.other.extern.cs" + "2": + name: "keyword.other.alias.cs" + "3": + name: "variable.other.alias.cs" + end: "(?=;)" + "using-directive": + patterns: [ + { + begin: "\\b(using)\\b\\s+(static)\\s+" + beginCaptures: + "1": + name: "keyword.other.using.cs" + "2": + name: "keyword.other.static.cs" + end: "(?=;)" + patterns: [ + { + include: "#type" + } + ] + } + { + begin: "\\b(using)\\s+(?=(@?[_[:alpha:]][_[:alnum:]]*)\\s*=)" + beginCaptures: + "1": + name: "keyword.other.using.cs" + "2": + name: "entity.name.type.alias.cs" + end: "(?=;)" + patterns: [ + { + include: "#comment" + } + { + include: "#type" + } + { + include: "#operator-assignment" + } + ] + } + { + begin: "\\b(using)\\s*" + beginCaptures: + "1": + name: "keyword.other.using.cs" + end: "(?=;)" + patterns: [ + { + include: "#comment" + } + { + name: "entity.name.type.namespace.cs" + match: "@?[_[:alpha:]][_[:alnum:]]*" + } + { + include: "#operator-assignment" + } + ] + } + ] + "attribute-section": + begin: "(\\[)(assembly|module|field|event|method|param|property|return|type)?(\\:)?" 
+ beginCaptures: + "1": + name: "punctuation.squarebracket.open.cs" + "2": + name: "keyword.other.attribute-specifier.cs" + "3": + name: "punctuation.separator.colon.cs" + end: "(\\])" + endCaptures: + "1": + name: "punctuation.squarebracket.close.cs" + patterns: [ + { + include: "#comment" + } + { + include: "#attribute" + } + { + include: "#punctuation-comma" + } + ] + attribute: + patterns: [ + { + include: "#type-name" + } + { + include: "#attribute-arguments" + } + ] + "attribute-arguments": + begin: "(\\()" + beginCaptures: + "1": + name: "punctuation.parenthesis.open.cs" + end: "(\\))" + endCaptures: + "1": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#attribute-named-argument" + } + { + include: "#expression" + } + { + include: "#punctuation-comma" + } + ] + "attribute-named-argument": + begin: "(@?[_[:alpha:]][_[:alnum:]]*)\\s*(?==)" + beginCaptures: + "1": + name: "entity.name.variable.property.cs" + end: "(?=(,|\\)))" + patterns: [ + { + include: "#operator-assignment" + } + { + include: "#expression" + } + ] + "namespace-declaration": + begin: "\\b(namespace)\\s+" + beginCaptures: + "1": + name: "keyword.other.namespace.cs" + end: "(?<=\\})" + patterns: [ + { + include: "#comment" + } + { + name: "entity.name.type.namespace.cs" + match: "@?[_[:alpha:]][_[:alnum:]]*" + } + { + include: "#punctuation-accessor" + } + { + begin: "\\{" + beginCaptures: + "0": + name: "punctuation.curlybrace.open.cs" + end: "\\}" + endCaptures: + "0": + name: "punctuation.curlybrace.close.cs" + patterns: [ + { + include: "#declarations" + } + { + include: "#using-directive" + } + { + include: "#punctuation-semicolon" + } + ] + } + ] + "storage-modifier": + name: "storage.modifier.cs" + match: "(?<!\\.)\\b(new|public|protected|internal|private|abstract|virtual|override|sealed|static|partial|readonly|volatile|const|extern|async|unsafe|ref)\\b" + "class-declaration": + begin: "(?=\\bclass\\b)" + end: "(?<=\\})" + patterns: [ + { + begin: ''' + (?x) + \\b(class)\\b\\s+ + (@?[_[:alpha:]][_[:alnum:]]*)\\s* + ''' + beginCaptures: + "1": + name: "keyword.other.class.cs" + "2": + name: "entity.name.type.class.cs" + end: "(?=\\{)" + patterns: [ + { + include: "#comment" + } + { + include: "#type-parameter-list" + } + { + include: "#base-types" + } + { + include: "#generic-constraints" + } + ] + } + { + begin: "\\{" + beginCaptures: + "0": + name: "punctuation.curlybrace.open.cs" + end: "\\}" + endCaptures: + "0": + name: "punctuation.curlybrace.close.cs" + patterns: [ + { + include: "#class-or-struct-members" + } + ] + } + { + include: "#preprocessor" + } + { + include: "#comment" + } + ] + "delegate-declaration": + begin: ''' + (?x) + (?:\\b(delegate)\\b)\\s+ + (?<typename> + (?: + (?:ref\\s+(?:readonly\\s+)?)? # ref return + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? 
+ ) + )\\s+ + (\\g<identifier>)\\s* + (<([^<>]+)>)?\\s* + (?=\\() + ''' + beginCaptures: + "1": + name: "keyword.other.delegate.cs" + "2": + patterns: [ + { + include: "#type" + } + ] + "7": + name: "entity.name.type.delegate.cs" + "8": + patterns: [ + { + include: "#type-parameter-list" + } + ] + end: "(?=;)" + patterns: [ + { + include: "#comment" + } + { + include: "#parenthesized-parameter-list" + } + { + include: "#generic-constraints" + } + ] + "enum-declaration": + begin: "(?=\\benum\\b)" + end: "(?<=\\})" + patterns: [ + { + begin: "(?=enum)" + end: "(?=\\{)" + patterns: [ + { + include: "#comment" + } + { + match: "(enum)\\s+(@?[_[:alpha:]][_[:alnum:]]*)" + captures: + "1": + name: "keyword.other.enum.cs" + "2": + name: "entity.name.type.enum.cs" + } + { + begin: ":" + beginCaptures: + "0": + name: "punctuation.separator.colon.cs" + end: "(?=\\{)" + patterns: [ + { + include: "#type" + } + ] + } + ] + } + { + begin: "\\{" + beginCaptures: + "0": + name: "punctuation.curlybrace.open.cs" + end: "\\}" + endCaptures: + "0": + name: "punctuation.curlybrace.close.cs" + patterns: [ + { + include: "#preprocessor" + } + { + include: "#comment" + } + { + include: "#attribute-section" + } + { + include: "#punctuation-comma" + } + { + begin: "@?[_[:alpha:]][_[:alnum:]]*" + beginCaptures: + "0": + name: "entity.name.variable.enum-member.cs" + end: "(?=(,|\\}))" + patterns: [ + { + include: "#comment" + } + { + include: "#variable-initializer" + } + ] + } + ] + } + { + include: "#preprocessor" + } + { + include: "#comment" + } + ] + "interface-declaration": + begin: "(?=\\binterface\\b)" + end: "(?<=\\})" + patterns: [ + { + begin: ''' + (?x) + (interface)\\b\\s+ + (@?[_[:alpha:]][_[:alnum:]]*) + ''' + beginCaptures: + "1": + name: "keyword.other.interface.cs" + "2": + name: "entity.name.type.interface.cs" + end: "(?=\\{)" + patterns: [ + { + include: "#comment" + } + { + include: "#type-parameter-list" + } + { + include: "#base-types" + } + { + include: "#generic-constraints" + } + ] + } + { + begin: "\\{" + beginCaptures: + "0": + name: "punctuation.curlybrace.open.cs" + end: "\\}" + endCaptures: + "0": + name: "punctuation.curlybrace.close.cs" + patterns: [ + { + include: "#interface-members" + } + ] + } + { + include: "#preprocessor" + } + { + include: "#comment" + } + ] + "struct-declaration": + begin: "(?=\\bstruct\\b)" + end: "(?<=\\})" + patterns: [ + { + begin: ''' + (?x) + (struct)\\b\\s+ + (@?[_[:alpha:]][_[:alnum:]]*) + ''' + beginCaptures: + "1": + name: "keyword.other.struct.cs" + "2": + name: "entity.name.type.struct.cs" + end: "(?=\\{)" + patterns: [ + { + include: "#comment" + } + { + include: "#type-parameter-list" + } + { + include: "#base-types" + } + { + include: "#generic-constraints" + } + ] + } + { + begin: "\\{" + beginCaptures: + "0": + name: "punctuation.curlybrace.open.cs" + end: "\\}" + endCaptures: + "0": + name: "punctuation.curlybrace.close.cs" + patterns: [ + { + include: "#class-or-struct-members" + } + ] + } + { + include: "#preprocessor" + } + { + include: "#comment" + } + ] + "type-parameter-list": + begin: "\\<" + beginCaptures: + "0": + name: "punctuation.definition.typeparameters.begin.cs" + end: "\\>" + endCaptures: + "0": + name: "punctuation.definition.typeparameters.end.cs" + patterns: [ + { + match: "\\b(in|out)\\b" + captures: + "1": + name: "storage.modifier.cs" + } + { + match: "(@?[_[:alpha:]][_[:alnum:]]*)\\b" + captures: + "1": + name: "entity.name.type.type-parameter.cs" + } + { + include: "#comment" + } + { + include: "#punctuation-comma" + } 
+ { + include: "#attribute-section" + } + ] + "base-types": + begin: ":" + beginCaptures: + "0": + name: "punctuation.separator.colon.cs" + end: "(?=\\{|where)" + patterns: [ + { + include: "#type" + } + { + include: "#punctuation-comma" + } + { + include: "#preprocessor" + } + ] + "generic-constraints": + begin: "(where)\\s+(@?[_[:alpha:]][_[:alnum:]]*)\\s*(:)" + beginCaptures: + "1": + name: "keyword.other.where.cs" + "2": + name: "entity.name.type.type-parameter.cs" + "3": + name: "punctuation.separator.colon.cs" + end: "(?=\\{|where|;|=>)" + patterns: [ + { + name: "keyword.other.class.cs" + match: "\\bclass\\b" + } + { + name: "keyword.other.struct.cs" + match: "\\bstruct\\b" + } + { + match: "(new)\\s*(\\()\\s*(\\))" + captures: + "1": + name: "keyword.other.new.cs" + "2": + name: "punctuation.parenthesis.open.cs" + "3": + name: "punctuation.parenthesis.close.cs" + } + { + include: "#type" + } + { + include: "#punctuation-comma" + } + { + include: "#generic-constraints" + } + ] + "field-declaration": + begin: ''' + (?x) + (?<typename> + (?: + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )\\s+ + (\\g<identifier>)\\s* # first field name + (?!=>|==)(?=,|;|=|$) + ''' + beginCaptures: + "1": + patterns: [ + { + include: "#type" + } + ] + "6": + name: "variable.other.cs" + end: "(?=;)" + patterns: [ + { + name: "variable.other.cs" + match: "@?[_[:alpha:]][_[:alnum:]]*" + } + { + include: "#punctuation-comma" + } + { + include: "#comment" + } + { + include: "#variable-initializer" + } + { + include: "#class-or-struct-members" + } + ] + "property-declaration": + begin: ''' + (?x) + + # The negative lookahead below ensures that we don't match nested types + # or other declarations as properties. + (?![[:word:][:space:]]*\\b(?:class|interface|struct|enum|event)\\b) + + (?<returntype> + (?<typename> + (?: + (?:ref\\s+(?:readonly\\s+)?)? # ref return + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )\\s+ + ) + (?<interfacename>\\g<typename>\\s*\\.\\s*)? + (?<propertyname>\\g<identifier>)\\s* + (?=\\{|=>|$) + ''' + beginCaptures: + "1": + patterns: [ + { + include: "#type" + } + ] + "7": + patterns: [ + { + include: "#type" + } + { + include: "#punctuation-accessor" + } + ] + "8": + name: "variable.other.cs" + end: "(?<=\\})|(?=;)" + patterns: [ + { + include: "#comment" + } + { + include: "#property-accessors" + } + { + include: "#expression-body" + } + { + include: "#variable-initializer" + } + { + include: "#class-or-struct-members" + } + ] + "indexer-declaration": + begin: ''' + (?x) + (?<returntype> + (?<typename> + (?: + (?:ref\\s+(?:readonly\\s+)?)? # ref return + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? 
# alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )\\s+ + ) + (?<interfacename>\\g<typename>\\s*\\.\\s*)? + (?<indexername>this)\\s* + (?=\\[) + ''' + beginCaptures: + "1": + patterns: [ + { + include: "#type" + } + ] + "7": + patterns: [ + { + include: "#type" + } + { + include: "#punctuation-accessor" + } + ] + "8": + name: "keyword.other.this.cs" + end: "(?<=\\})|(?=;)" + patterns: [ + { + include: "#comment" + } + { + include: "#bracketed-parameter-list" + } + { + include: "#property-accessors" + } + { + include: "#expression-body" + } + { + include: "#variable-initializer" + } + ] + "event-declaration": + begin: ''' + (?x) + \\b(event)\\b\\s* + (?<returntype> + (?<typename> + (?: + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )\\s+ + ) + (?<interfacename>\\g<typename>\\s*\\.\\s*)? + (?<eventnames>\\g<identifier>(?:\\s*,\\s*\\g<identifier>)*)\\s* + (?=\\{|;|$) + ''' + beginCaptures: + "1": + name: "keyword.other.event.cs" + "2": + patterns: [ + { + include: "#type" + } + ] + "8": + patterns: [ + { + include: "#type" + } + { + include: "#punctuation-accessor" + } + ] + "9": + patterns: [ + { + name: "entity.name.variable.event.cs" + match: "@?[_[:alpha:]][_[:alnum:]]*" + } + { + include: "#punctuation-comma" + } + ] + end: "(?<=\\})|(?=;)" + patterns: [ + { + include: "#comment" + } + { + include: "#event-accessors" + } + { + include: "#punctuation-comma" + } + ] + "property-accessors": + begin: "\\{" + beginCaptures: + "0": + name: "punctuation.curlybrace.open.cs" + end: "\\}" + endCaptures: + "0": + name: "punctuation.curlybrace.close.cs" + patterns: [ + { + name: "storage.modifier.cs" + match: "\\b(private|protected|internal)\\b" + } + { + name: "keyword.other.get.cs" + match: "\\b(get)\\b" + } + { + name: "keyword.other.set.cs" + match: "\\b(set)\\b" + } + { + include: "#comment" + } + { + include: "#attribute-section" + } + { + include: "#expression-body" + } + { + include: "#block" + } + { + include: "#punctuation-semicolon" + } + ] + "event-accessors": + begin: "\\{" + beginCaptures: + "0": + name: "punctuation.curlybrace.open.cs" + end: "\\}" + endCaptures: + "0": + name: "punctuation.curlybrace.close.cs" + patterns: [ + { + name: "keyword.other.add.cs" + match: "\\b(add)\\b" + } + { + name: "keyword.other.remove.cs" + match: "\\b(remove)\\b" + } + { + include: "#comment" + } + { + include: "#attribute-section" + } + { + include: "#expression-body" + } + { + include: "#block" + } + { + include: "#punctuation-semicolon" + } + ] + "method-declaration": + begin: ''' + (?x) + (?<returntype> + (?<typename> + (?: + (?:ref\\s+(?:readonly\\s+)?)? # ref return + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? 
# alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )\\s+ + ) + (?<interfacename>\\g<typename>\\s*\\.\\s*)? + (\\g<identifier>)\\s* + (<([^<>]+)>)?\\s* + (?=\\() + ''' + beginCaptures: + "1": + patterns: [ + { + include: "#type" + } + ] + "7": + patterns: [ + { + include: "#type" + } + { + include: "#punctuation-accessor" + } + ] + "8": + name: "entity.name.function.cs" + "9": + patterns: [ + { + include: "#type-parameter-list" + } + ] + end: "(?<=\\})|(?=;)" + patterns: [ + { + include: "#comment" + } + { + include: "#parenthesized-parameter-list" + } + { + include: "#generic-constraints" + } + { + include: "#expression-body" + } + { + include: "#block" + } + ] + "constructor-declaration": + begin: "(?=@?[_[:alpha:]][_[:alnum:]]*\\s*\\()" + end: "(?<=\\})|(?=;)" + patterns: [ + { + match: "(@?[_[:alpha:]][_[:alnum:]]*)\\b" + captures: + "1": + name: "entity.name.function.cs" + } + { + begin: "(:)" + beginCaptures: + "1": + name: "punctuation.separator.colon.cs" + end: "(?=\\{|=>)" + patterns: [ + { + include: "#constructor-initializer" + } + ] + } + { + include: "#parenthesized-parameter-list" + } + { + include: "#preprocessor" + } + { + include: "#comment" + } + { + include: "#expression-body" + } + { + include: "#block" + } + ] + "constructor-initializer": + begin: "\\b(?:(base)|(this))\\b\\s*(?=\\()" + beginCaptures: + "1": + name: "keyword.other.base.cs" + "2": + name: "keyword.other.this.cs" + end: "(?<=\\))" + patterns: [ + { + include: "#argument-list" + } + ] + "destructor-declaration": + begin: "(~)(@?[_[:alpha:]][_[:alnum:]]*)\\s*(?=\\()" + beginCaptures: + "1": + name: "punctuation.tilde.cs" + "2": + name: "entity.name.function.cs" + end: "(?<=\\})|(?=;)" + patterns: [ + { + include: "#comment" + } + { + include: "#parenthesized-parameter-list" + } + { + include: "#expression-body" + } + { + include: "#block" + } + ] + "operator-declaration": + begin: ''' + (?x) + (?<typename> + (?: + (?:ref\\s+(?:readonly\\s+)?)? # ref return + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )\\s* + (?<operatorkeyword>(?:\\b(?:operator)))\\s* + (?<operator>(?:\\+|-|\\*|/|%|&|\\||\\^|\\<\\<|\\>\\>|==|!=|\\>|\\<|\\>=|\\<=|!|~|\\+\\+|--|true|false))\\s* + (?=\\() + ''' + beginCaptures: + "1": + patterns: [ + { + include: "#type" + } + ] + "6": + name: "keyword.other.operator-decl.cs" + "7": + name: "entity.name.function.cs" + end: "(?<=\\})|(?=;)" + patterns: [ + { + include: "#comment" + } + { + include: "#parenthesized-parameter-list" + } + { + include: "#expression-body" + } + { + include: "#block" + } + ] + "conversion-operator-declaration": + begin: ''' + (?x) + (?<explicitorimplicitkeyword>(?:\\b(?:explicit|implicit)))\\s* + (?<operatorkeyword>(?:\\b(?:operator)))\\s* + (?<typename> + (?: + (?:ref\\s+(?:readonly\\s+)?)? 
# ref return + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )\\s* + (?=\\() + ''' + beginCaptures: + "1": + patterns: [ + { + match: "\\b(explicit)\\b" + captures: + "1": + name: "keyword.other.explicit.cs" + } + { + match: "\\b(implicit)\\b" + captures: + "1": + name: "keyword.other.implicit.cs" + } + ] + "2": + name: "keyword.other.operator-decl.cs" + "3": + patterns: [ + { + include: "#type" + } + ] + end: "(?<=\\})|(?=;)" + patterns: [ + { + include: "#comment" + } + { + include: "#parenthesized-parameter-list" + } + { + include: "#expression-body" + } + { + include: "#block" + } + ] + block: + begin: "\\{" + beginCaptures: + "0": + name: "punctuation.curlybrace.open.cs" + end: "\\}" + endCaptures: + "0": + name: "punctuation.curlybrace.close.cs" + patterns: [ + { + include: "#statement" + } + ] + "variable-initializer": + begin: "(?<!=|!)(=)(?!=|>)" + beginCaptures: + "1": + name: "keyword.operator.assignment.cs" + end: "(?=[,\\)\\];}])" + patterns: [ + { + include: "#ref-modifier" + } + { + include: "#expression" + } + ] + "expression-body": + begin: "=>" + beginCaptures: + "0": + name: "keyword.operator.arrow.cs" + end: "(?=[,\\);}])" + patterns: [ + { + include: "#ref-modifier" + } + { + include: "#expression" + } + ] + "goto-statement": + begin: "(?<!\\.)\\b(goto)\\b" + beginCaptures: + "1": + name: "keyword.control.goto.cs" + end: "(?=;)" + patterns: [ + { + begin: "\\b(case)\\b" + beginCaptures: + "1": + name: "keyword.control.case.cs" + end: "(?=;)" + patterns: [ + { + include: "#expression" + } + ] + } + { + match: "\\b(default)\\b" + captures: + "1": + name: "keyword.control.default.cs" + } + { + name: "entity.name.label.cs" + match: "@?[_[:alpha:]][_[:alnum:]]*" + } + ] + "return-statement": + begin: "(?<!\\.)\\b(return)\\b" + beginCaptures: + "1": + name: "keyword.control.flow.return.cs" + end: "(?=;)" + patterns: [ + { + include: "#ref-modifier" + } + { + include: "#expression" + } + ] + "break-or-continue-statement": + match: "(?<!\\.)\\b(?:(break)|(continue))\\b" + captures: + "1": + name: "keyword.control.flow.break.cs" + "2": + name: "keyword.control.flow.continue.cs" + "throw-statement": + begin: "(?<!\\.)\\b(throw)\\b" + beginCaptures: + "1": + name: "keyword.control.flow.throw.cs" + end: "(?=;)" + patterns: [ + { + include: "#expression" + } + ] + "yield-statement": + patterns: [ + { + include: "#yield-return-statement" + } + { + include: "#yield-break-statement" + } + ] + "yield-return-statement": + begin: "(?<!\\.)\\b(yield)\\b\\s*\\b(return)\\b" + beginCaptures: + "1": + name: "keyword.control.flow.yield.cs" + "2": + name: "keyword.control.flow.return.cs" + end: "(?=;)" + patterns: [ + { + include: "#expression" + } + ] + "yield-break-statement": + match: "(?<!\\.)\\b(yield)\\b\\s*\\b(break)\\b" + captures: + "1": + name: "keyword.control.flow.yield.cs" + "2": + name: "keyword.control.flow.break.cs" + "await-statement": + begin: "(?<!\\.)\\b(await)\\b" + beginCaptures: + "1": + name: "keyword.other.await.cs" + end: "(?=;)" + patterns: [ + { + include: "#expression" + } + ] + "if-statement": + begin: "(?<!\\.)\\b(if)\\b\\s*(?=\\()" + beginCaptures: + 
"1": + name: "keyword.control.conditional.if.cs" + end: "(?<=\\})|(?=;)" + patterns: [ + { + begin: "\\(" + beginCaptures: + "0": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#expression" + } + ] + } + { + include: "#statement" + } + ] + "else-part": + begin: "(?<!\\.)\\b(else)\\b" + beginCaptures: + "1": + name: "keyword.control.conditional.else.cs" + end: "(?<=\\})|(?=;)" + patterns: [ + { + include: "#statement" + } + ] + "switch-statement": + begin: "(?<!\\.)\\b(switch)\\b\\s*(?=\\()" + beginCaptures: + "1": + name: "keyword.control.switch.cs" + end: "(?<=\\})" + patterns: [ + { + begin: "\\(" + beginCaptures: + "0": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#expression" + } + ] + } + { + begin: "\\{" + beginCaptures: + "0": + name: "punctuation.curlybrace.open.cs" + end: "\\}" + endCaptures: + "0": + name: "punctuation.curlybrace.close.cs" + patterns: [ + { + include: "#switch-label" + } + { + include: "#statement" + } + ] + } + ] + "switch-label": + patterns: [ + { + begin: "(?<!\\.)\\b(case)\\b\\s+" + beginCaptures: + "1": + name: "keyword.control.case.cs" + end: ":" + endCaptures: + "0": + name: "punctuation.separator.colon.cs" + patterns: [ + { + include: "#expression" + } + ] + } + { + match: "(?<!\\.)\\b(default)\\b\\s*(:)" + captures: + "1": + name: "keyword.control.default.cs" + "2": + name: "punctuation.separator.colon.cs" + } + ] + "do-statement": + begin: "(?<!\\.)\\b(do)\\b" + beginCaptures: + "1": + name: "keyword.control.loop.do.cs" + end: "(?=;|})" + patterns: [ + { + include: "#statement" + } + ] + "while-statement": + begin: "(?<!\\.)\\b(while)\\b\\s*(?=\\()" + beginCaptures: + "1": + name: "keyword.control.loop.while.cs" + end: "(?<=\\})|(?=;)" + patterns: [ + { + begin: "\\(" + beginCaptures: + "0": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#expression" + } + ] + } + { + include: "#statement" + } + ] + "for-statement": + begin: "(?<!\\.)\\b(for)\\b\\s*(?=\\()" + beginCaptures: + "1": + name: "keyword.control.loop.for.cs" + end: "(?<=\\})|(?=;)" + patterns: [ + { + begin: "\\(" + beginCaptures: + "0": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#local-variable-declaration" + } + { + include: "#expression" + } + { + include: "#punctuation-comma" + } + { + include: "#punctuation-semicolon" + } + ] + } + { + include: "#statement" + } + ] + "foreach-statement": + begin: "(?<!\\.)\\b(foreach)\\b\\s*(?=\\()" + beginCaptures: + "1": + name: "keyword.control.loop.foreach.cs" + end: "(?<=\\})|(?=;)" + patterns: [ + { + begin: "\\(" + beginCaptures: + "0": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + match: ''' + (?x) + (?: + (\\bvar\\b)| + (?<typename> + (?: + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? 
+ (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + ) + )\\s+ + (\\g<identifier>)\\s+ + \\b(in)\\b + ''' + captures: + "1": + name: "keyword.other.var.cs" + "2": + patterns: [ + { + include: "#type" + } + ] + "7": + name: "variable.other.cs" + "8": + name: "keyword.control.loop.in.cs" + } + { + match: ''' + (?x) # match foreach (var (x, y) in ...) + (?:\\b(var)\\b\\s*)? + (?<tuple>\\((?:[^\\(\\)]|\\g<tuple>)+\\))\\s+ + \\b(in)\\b + ''' + captures: + "1": + name: "keyword.other.var.cs" + "2": + patterns: [ + { + include: "#tuple-declaration-deconstruction-element-list" + } + ] + "3": + name: "keyword.control.loop.in.cs" + } + { + include: "#expression" + } + ] + } + { + include: "#statement" + } + ] + "try-statement": + patterns: [ + { + include: "#try-block" + } + { + include: "#catch-clause" + } + { + include: "#finally-clause" + } + ] + "try-block": + begin: "(?<!\\.)\\b(try)\\b" + beginCaptures: + "1": + name: "keyword.control.try.cs" + end: "(?<=\\})" + patterns: [ + { + include: "#comment" + } + { + include: "#block" + } + ] + "finally-clause": + begin: "(?<!\\.)\\b(finally)\\b" + beginCaptures: + "1": + name: "keyword.control.try.finally.cs" + end: "(?<=\\})" + patterns: [ + { + include: "#comment" + } + { + include: "#block" + } + ] + "catch-clause": + begin: "(?<!\\.)\\b(catch)\\b" + beginCaptures: + "1": + name: "keyword.control.try.catch.cs" + end: "(?<=\\})" + patterns: [ + { + begin: "\\(" + beginCaptures: + "0": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + match: ''' + (?x) + (?<typename> + (?: + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )\\s* + (?:(\\g<identifier>)\\b)? 
+ ''' + captures: + "1": + patterns: [ + { + include: "#type" + } + ] + "6": + name: "variable.other.cs" + } + ] + } + { + include: "#when-clause" + } + { + include: "#comment" + } + { + include: "#block" + } + ] + "when-clause": + begin: "(?<!\\.)\\b(when)\\b\\s*(\\()" + beginCaptures: + "1": + name: "keyword.control.try.when.cs" + "2": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#expression" + } + { + include: "#comment" + } + ] + "checked-unchecked-statement": + begin: "(?<!\\.)\\b(?:(checked)|(unchecked))\\b\\s*(?!\\()" + beginCaptures: + "1": + name: "keyword.other.checked.cs" + "2": + name: "keyword.other.unchecked.cs" + end: "(?<=\\})" + patterns: [ + { + include: "#block" + } + { + include: "#comment" + } + ] + "lock-statement": + begin: "(?<!\\.)\\b(lock)\\b\\s*(?=\\()" + beginCaptures: + "1": + name: "keyword.other.lock.cs" + end: "(?<=\\})|(?=;)" + patterns: [ + { + begin: "\\(" + beginCaptures: + "0": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#expression" + } + ] + } + { + include: "#statement" + } + ] + "using-statement": + begin: "(?<!\\.)\\b(using)\\b\\s*(?=\\()" + beginCaptures: + "1": + name: "keyword.other.using.cs" + end: "(?=\\;|})" + patterns: [ + { + begin: "\\(" + beginCaptures: + "0": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#local-variable-declaration" + } + { + include: "#expression" + } + ] + } + { + include: "#statement" + } + ] + "labeled-statement": + match: "(@?[_[:alpha:]][_[:alnum:]]*)\\s*(:)" + captures: + "1": + name: "entity.name.label.cs" + "2": + name: "punctuation.separator.colon.cs" + "local-declaration": + patterns: [ + { + include: "#local-constant-declaration" + } + { + include: "#local-variable-declaration" + } + { + include: "#local-tuple-var-deconstruction" + } + ] + "local-variable-declaration": + begin: ''' + (?x) + (?: + (?:(\\bref)\\s+(?:(\\breadonly)\\s+)?)?(\\bvar\\b)| # ref local + (?<typename> + (?: + (?:ref\\s+(?:readonly\\s+)?)? # ref local + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + ) + )\\s+ + (\\g<identifier>)\\s* + (?=,|;|=|\\)) + ''' + beginCaptures: + "1": + name: "storage.modifier.cs" + "2": + name: "storage.modifier.cs" + "3": + name: "keyword.other.var.cs" + "4": + patterns: [ + { + include: "#type" + } + ] + "9": + name: "variable.other.cs" + end: "(?=;|\\))" + patterns: [ + { + name: "variable.other.cs" + match: "@?[_[:alpha:]][_[:alnum:]]*" + } + { + include: "#punctuation-comma" + } + { + include: "#comment" + } + { + include: "#variable-initializer" + } + ] + "local-constant-declaration": + begin: ''' + (?x) + (?<constkeyword>\\b(?:const)\\b)\\s* + (?<typename> + (?: + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? 
+ ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )\\s+ + (\\g<identifier>)\\s* + (?=,|;|=) + ''' + beginCaptures: + "1": + name: "storage.modifier.cs" + "2": + patterns: [ + { + include: "#type" + } + ] + "7": + name: "variable.other.cs" + end: "(?=;)" + patterns: [ + { + name: "variable.other.cs" + match: "@?[_[:alpha:]][_[:alnum:]]*" + } + { + include: "#punctuation-comma" + } + { + include: "#comment" + } + { + include: "#variable-initializer" + } + ] + "local-tuple-var-deconstruction": + begin: ''' + (?x) # e.g. var (x, y) = GetPoint(); + (?:\\b(var)\\b\\s*) + (?<tuple>\\((?:[^\\(\\)]|\\g<tuple>)+\\))\\s* + (?=;|=|\\)) + ''' + beginCaptures: + "1": + name: "keyword.other.var.cs" + "2": + patterns: [ + { + include: "#tuple-declaration-deconstruction-element-list" + } + ] + end: "(?=;|\\))" + patterns: [ + { + include: "#comment" + } + { + include: "#variable-initializer" + } + ] + "tuple-deconstruction-assignment": + match: ''' + (?x) + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\))\\s* + (?!=>|==)(?==) + ''' + captures: + "1": + patterns: [ + { + include: "#tuple-deconstruction-element-list" + } + ] + "tuple-declaration-deconstruction-element-list": + begin: "\\(" + beginCaptures: + "0": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#comment" + } + { + include: "#tuple-declaration-deconstruction-element-list" + } + { + include: "#declaration-expression-tuple" + } + { + include: "#punctuation-comma" + } + { + match: ''' + (?x) # e.g. x + (@?[_[:alpha:]][_[:alnum:]]*)\\b\\s* + (?=[,)]) + ''' + captures: + "1": + name: "entity.name.variable.tuple-element.cs" + } + ] + "tuple-deconstruction-element-list": + begin: "\\(" + beginCaptures: + "0": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#comment" + } + { + include: "#tuple-deconstruction-element-list" + } + { + include: "#declaration-expression-tuple" + } + { + include: "#punctuation-comma" + } + { + match: ''' + (?x) # e.g. x + (@?[_[:alpha:]][_[:alnum:]]*)\\b\\s* + (?=[,)]) + ''' + captures: + "1": + name: "variable.other.readwrite.cs" + } + ] + "declaration-expression-local": + match: ''' + (?x) # e.g. int x OR var x + (?: + \\b(var)\\b| + (?<typename> + (?: + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + ) + )\\s+ + (\\g<identifier>)\\b\\s* + (?=[,)\\]]) + ''' + captures: + "1": + name: "keyword.other.var.cs" + "2": + patterns: [ + { + include: "#type" + } + ] + "7": + name: "variable.other.cs" + "declaration-expression-tuple": + match: ''' + (?x) # e.g. int x OR var x + (?: + \\b(var)\\b| + (?<typename> + (?: + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? 
+ ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + ) + )\\s+ + (\\g<identifier>)\\b\\s* + (?=[,)]) + ''' + captures: + "1": + name: "keyword.other.var.cs" + "2": + patterns: [ + { + include: "#type" + } + ] + "7": + name: "entity.name.variable.tuple-element.cs" + "checked-unchecked-expression": + begin: "(?<!\\.)\\b(?:(checked)|(unchecked))\\b\\s*(\\()" + beginCaptures: + "1": + name: "keyword.other.checked.cs" + "2": + name: "keyword.other.unchecked.cs" + "3": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#expression" + } + ] + "typeof-or-default-expression": + begin: "(?<!\\.)\\b(?:(typeof)|(default))\\b\\s*(\\()" + beginCaptures: + "1": + name: "keyword.other.typeof.cs" + "2": + name: "keyword.other.default.cs" + "3": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#type" + } + ] + "nameof-expression": + begin: "(?<!\\.)\\b(nameof)\\b\\s*(\\()" + beginCaptures: + "1": + name: "keyword.other.nameof.cs" + "2": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#expression" + } + ] + "throw-expression": + match: "(?<!\\.)\\b(throw)\\b" + captures: + "1": + name: "keyword.control.flow.throw.cs" + "interpolated-string": + name: "string.quoted.double.cs" + begin: "\\$\"" + beginCaptures: + "0": + name: "punctuation.definition.string.begin.cs" + end: "(\")|((?:[^\\\\\\n])$)" + endCaptures: + "1": + name: "punctuation.definition.string.end.cs" + "2": + name: "invalid.illegal.newline.cs" + patterns: [ + { + include: "#string-character-escape" + } + { + include: "#interpolation" + } + ] + "verbatim-interpolated-string": + name: "string.quoted.double.cs" + begin: "\\$@\"" + beginCaptures: + "0": + name: "punctuation.definition.string.begin.cs" + end: "\"(?=[^\"])" + endCaptures: + "0": + name: "punctuation.definition.string.end.cs" + patterns: [ + { + include: "#verbatim-string-character-escape" + } + { + include: "#interpolation" + } + ] + interpolation: + name: "meta.interpolation.cs" + begin: "(?<=[^\\{])((?:\\{\\{)*)(\\{)(?=[^\\{])" + beginCaptures: + "1": + name: "string.quoted.double.cs" + "2": + name: "punctuation.definition.interpolation.begin.cs" + end: "\\}" + endCaptures: + "0": + name: "punctuation.definition.interpolation.end.cs" + patterns: [ + { + include: "#expression" + } + ] + literal: + patterns: [ + { + include: "#boolean-literal" + } + { + include: "#null-literal" + } + { + include: "#numeric-literal" + } + { + include: "#char-literal" + } + { + include: "#string-literal" + } + { + include: "#verbatim-string-literal" + } + { + include: "#tuple-literal" + } + ] + "boolean-literal": + patterns: [ + { + name: "constant.language.boolean.true.cs" + match: "(?<!\\.)\\btrue\\b" + } + { + name: "constant.language.boolean.false.cs" + match: "(?<!\\.)\\bfalse\\b" + } + ] + "null-literal": + name: "constant.language.null.cs" + match: "(?<!\\.)\\bnull\\b" + "numeric-literal": + patterns: [ + { + name: "constant.numeric.hex.cs" + match: "\\b0(x|X)[0-9a-fA-F_]+(U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?\\b" + } + { + name: "constant.numeric.binary.cs" + match: "\\b0(b|B)[01_]+(U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?\\b" + 
} + { + name: "constant.numeric.decimal.cs" + match: "\\b([0-9_]+)?\\.[0-9_]+((e|E)[0-9]+)?(F|f|D|d|M|m)?\\b" + } + { + name: "constant.numeric.decimal.cs" + match: "\\b[0-9_]+(e|E)[0-9_]+(F|f|D|d|M|m)?\\b" + } + { + name: "constant.numeric.decimal.cs" + match: "\\b[0-9_]+(F|f|D|d|M|m)\\b" + } + { + name: "constant.numeric.decimal.cs" + match: "\\b[0-9_]+(U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?\\b" + } + ] + "char-literal": + name: "string.quoted.single.cs" + begin: "'" + beginCaptures: + "0": + name: "punctuation.definition.char.begin.cs" + end: "(\\')|((?:[^\\\\\\n])$)" + endCaptures: + "1": + name: "punctuation.definition.char.end.cs" + "2": + name: "invalid.illegal.newline.cs" + patterns: [ + { + include: "#string-character-escape" + } + ] + "string-literal": + name: "string.quoted.double.cs" + begin: "(?<!@)\"" + beginCaptures: + "0": + name: "punctuation.definition.string.begin.cs" + end: "(\")|((?:[^\\\\\\n])$)" + endCaptures: + "1": + name: "punctuation.definition.string.end.cs" + "2": + name: "invalid.illegal.newline.cs" + patterns: [ + { + include: "#string-character-escape" + } + ] + "string-character-escape": + name: "constant.character.escape.cs" + match: "\\\\." + "verbatim-string-literal": + name: "string.quoted.double.cs" + begin: "@\"" + beginCaptures: + "0": + name: "punctuation.definition.string.begin.cs" + end: "\"(?=[^\"])" + endCaptures: + "0": + name: "punctuation.definition.string.end.cs" + patterns: [ + { + include: "#verbatim-string-character-escape" + } + ] + "verbatim-string-character-escape": + name: "constant.character.escape.cs" + match: "\"\"" + "tuple-literal": + begin: "(\\()(?=.*[:,])" + beginCaptures: + "1": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#comment" + } + { + include: "#tuple-literal-element" + } + { + include: "#punctuation-comma" + } + ] + "tuple-literal-element": + begin: ''' + (?x) + (?:(@?[_[:alpha:]][_[:alnum:]]*)\\s*(:)\\s*)? + (?![,)]) + ''' + beginCaptures: + "0": + name: "entity.name.variable.tuple-element.cs" + "1": + name: "punctuation.separator.colon.cs" + end: "(?=[,)])" + patterns: [ + { + include: "#expression" + } + ] + "expression-operators": + patterns: [ + { + name: "keyword.operator.assignment.compound.cs" + match: "\\*=|/=|%=|\\+=|-=" + } + { + name: "keyword.operator.assignment.compound.bitwise.cs" + match: "\\&=|\\^=|<<=|>>=|\\|=" + } + { + name: "keyword.operator.bitwise.shift.cs" + match: "<<|>>" + } + { + name: "keyword.operator.comparison.cs" + match: "==|!=" + } + { + name: "keyword.operator.relational.cs" + match: "<=|>=|<|>" + } + { + name: "keyword.operator.logical.cs" + match: "\\!|&&|\\|\\|" + } + { + name: "keyword.operator.bitwise.cs" + match: "\\&|~|\\^|\\|" + } + { + name: "keyword.operator.assignment.cs" + match: "\\=" + } + { + name: "keyword.operator.decrement.cs" + match: "--" + } + { + name: "keyword.operator.increment.cs" + match: "\\+\\+" + } + { + name: "keyword.operator.arithmetic.cs" + match: "%|\\*|/|-|\\+" + } + { + name: "keyword.operator.null-coalescing.cs" + match: "\\?\\?" 
+ } + ] + "conditional-operator": + begin: "(?<!\\?)\\?(?!\\?|\\.|\\[)" + beginCaptures: + "0": + name: "keyword.operator.conditional.question-mark.cs" + end: ":" + endCaptures: + "0": + name: "keyword.operator.conditional.colon.cs" + patterns: [ + { + include: "#expression" + } + ] + "await-expression": + name: "keyword.other.await.cs" + match: "(?!\\.)\\b(await)\\b" + "parenthesized-expression": + begin: "\\(" + beginCaptures: + "0": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#expression" + } + ] + "initializer-expression": + begin: "\\{" + beginCaptures: + "0": + name: "punctuation.curlybrace.open.cs" + end: "\\}" + endCaptures: + "0": + name: "punctuation.curlybrace.close.cs" + patterns: [ + { + include: "#expression" + } + { + include: "#punctuation-comma" + } + ] + identifier: + name: "variable.other.readwrite.cs" + match: "@?[_[:alpha:]][_[:alnum:]]*" + "cast-expression": + match: ''' + (?x) + (\\()\\s* + (?<typename> + (?: + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )\\s* + (\\))(?=\\s*@?[_[:alnum:]\\(]) + ''' + captures: + "1": + name: "punctuation.parenthesis.open.cs" + "2": + patterns: [ + { + include: "#type" + } + ] + "7": + name: "punctuation.parenthesis.close.cs" + "as-expression": + match: ''' + (?x) + (?<!\\.)\\b(as)\\b\\s* + (?<typename> + (?: + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )? + ''' + captures: + "1": + name: "keyword.other.as.cs" + "2": + patterns: [ + { + include: "#type" + } + ] + "is-expression": + match: ''' + (?x) + (?<!\\.)\\b(is)\\b\\s* + (?<typename> + (?: + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )? + ''' + captures: + "1": + name: "keyword.other.is.cs" + "2": + patterns: [ + { + include: "#type" + } + ] + "this-or-base-expression": + match: "\\b(?:(base)|(this))\\b" + captures: + "1": + name: "keyword.other.base.cs" + "2": + name: "keyword.other.this.cs" + "invocation-expression": + begin: ''' + (?x) + (?:(\\?)\\s*)? # preceding null-conditional operator? + (?:(\\.)\\s*)? # preceding dot? 
+ (@?[_[:alpha:]][_[:alnum:]]*)\\s* # method name + (?<typeargs>\\s*<([^<>]|\\g<typeargs>)+>\\s*)?\\s* # type arguments + (?=\\() # open paren of argument list + ''' + beginCaptures: + "1": + name: "keyword.operator.null-conditional.cs" + "2": + name: "punctuation.accessor.cs" + "3": + name: "entity.name.function.cs" + "4": + patterns: [ + { + include: "#type-arguments" + } + ] + end: "(?<=\\))" + patterns: [ + { + include: "#argument-list" + } + ] + "element-access-expression": + begin: ''' + (?x) + (?:(\\?)\\s*)? # preceding null-conditional operator? + (?:(\\.)\\s*)? # preceding dot? + (?:(@?[_[:alpha:]][_[:alnum:]]*)\\s*)? # property name + (?:(\\?)\\s*)? # null-conditional operator? + (?=\\[) # open bracket of argument list + ''' + beginCaptures: + "1": + name: "keyword.operator.null-conditional.cs" + "2": + name: "punctuation.accessor.cs" + "3": + name: "variable.other.object.property.cs" + "4": + name: "keyword.operator.null-conditional.cs" + end: "(?<=\\])(?!\\s*\\[)" + patterns: [ + { + include: "#bracketed-argument-list" + } + ] + "member-access-expression": + patterns: [ + { + match: ''' + (?x) + (?:(\\?)\\s*)? # preceding null-conditional operator? + (\\.)\\s* # preceding dot + (@?[_[:alpha:]][_[:alnum:]]*)\\s* # property name + (?![_[:alnum:]]|\\(|(\\?)?\\[|<) # next character is not alpha-numeric, nor a (, [, or <. Also, test for ?[ + ''' + captures: + "1": + name: "keyword.operator.null-conditional.cs" + "2": + name: "punctuation.accessor.cs" + "3": + name: "variable.other.object.property.cs" + } + { + match: ''' + (?x) + (\\.)?\\s* + (@?[_[:alpha:]][_[:alnum:]]*) + (?<typeparams>\\s*<([^<>]|\\g<typeparams>)+>\\s*) + (?= + (\\s*\\?)? + \\s*\\.\\s*@?[_[:alpha:]][_[:alnum:]]* + ) + ''' + captures: + "1": + name: "punctuation.accessor.cs" + "2": + name: "variable.other.object.cs" + "3": + patterns: [ + { + include: "#type-arguments" + } + ] + } + { + match: ''' + (?x) + (@?[_[:alpha:]][_[:alnum:]]*) + (?= + (\\s*\\?)? + \\s*\\.\\s*@?[_[:alpha:]][_[:alnum:]]* + ) + ''' + captures: + "1": + name: "variable.other.object.cs" + } + ] + "object-creation-expression": + patterns: [ + { + include: "#object-creation-expression-with-parameters" + } + { + include: "#object-creation-expression-with-no-parameters" + } + ] + "object-creation-expression-with-parameters": + begin: ''' + (?x) + (new)\\s+ + (?<typename> + (?: + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )\\s* + (?=\\() + ''' + beginCaptures: + "1": + name: "keyword.other.new.cs" + "2": + patterns: [ + { + include: "#type" + } + ] + end: "(?<=\\))" + patterns: [ + { + include: "#argument-list" + } + ] + "object-creation-expression-with-no-parameters": + match: ''' + (?x) + (new)\\s+ + (?<typename> + (?: + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? 
+ (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )\\s* + (?=\\{|$) + ''' + captures: + "1": + name: "keyword.other.new.cs" + "2": + patterns: [ + { + include: "#type" + } + ] + "array-creation-expression": + begin: ''' + (?x) + \\b(new|stackalloc)\\b\\s* + (?<typename> + (?: + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )?\\s* + (?=\\[) + ''' + beginCaptures: + "1": + name: "keyword.other.new.cs" + "2": + patterns: [ + { + include: "#type" + } + ] + end: "(?<=\\])" + patterns: [ + { + include: "#bracketed-argument-list" + } + ] + "anonymous-object-creation-expression": + begin: "\\b(new)\\b\\s*(?=\\{|$)" + beginCaptures: + "1": + name: "keyword.other.new.cs" + end: "(?<=\\})" + patterns: [ + { + include: "#initializer-expression" + } + ] + "bracketed-parameter-list": + begin: "(?=(\\[))" + beginCaptures: + "1": + name: "punctuation.squarebracket.open.cs" + end: "(?=(\\]))" + endCaptures: + "1": + name: "punctuation.squarebracket.close.cs" + patterns: [ + { + begin: "(?<=\\[)" + end: "(?=\\])" + patterns: [ + { + include: "#comment" + } + { + include: "#attribute-section" + } + { + include: "#parameter" + } + { + include: "#punctuation-comma" + } + { + include: "#variable-initializer" + } + ] + } + ] + "parenthesized-parameter-list": + begin: "(\\()" + beginCaptures: + "0": + name: "punctuation.parenthesis.open.cs" + end: "(\\))" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#comment" + } + { + include: "#attribute-section" + } + { + include: "#parameter" + } + { + include: "#punctuation-comma" + } + { + include: "#variable-initializer" + } + ] + parameter: + match: ''' + (?x) + (?:(?:\\b(ref|params|out|in|this)\\b)\\s+)? + (?<typename> + (?: + (?:ref\\s+)? # ref return + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? 
+ ) + )\\s+ + (\\g<identifier>) + ''' + captures: + "1": + name: "storage.modifier.cs" + "2": + patterns: [ + { + include: "#type" + } + ] + "7": + name: "variable.parameter.function.cs" + "argument-list": + begin: "\\(" + beginCaptures: + "0": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#named-argument" + } + { + include: "#argument" + } + { + include: "#punctuation-comma" + } + ] + "bracketed-argument-list": + begin: "\\[" + beginCaptures: + "0": + name: "punctuation.squarebracket.open.cs" + end: "\\]" + endCaptures: + "0": + name: "punctuation.squarebracket.close.cs" + patterns: [ + { + include: "#named-argument" + } + { + include: "#argument" + } + { + include: "#punctuation-comma" + } + ] + "named-argument": + begin: "(@?[_[:alpha:]][_[:alnum:]]*)\\s*(:)" + beginCaptures: + "1": + name: "variable.parameter.function.cs" + "2": + name: "punctuation.separator.colon.cs" + end: "(?=(,|\\)|\\]))" + patterns: [ + { + include: "#argument" + } + ] + argument: + patterns: [ + { + name: "storage.modifier.cs" + match: "\\b(ref|out|in)\\b" + } + { + include: "#declaration-expression-local" + } + { + include: "#expression" + } + ] + "query-expression": + begin: ''' + (?x) + \\b(from)\\b\\s* + (?<typename> + (?: + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )? + \\s+(\\g<identifier>)\\b\\s* + \\b(in)\\b\\s* + ''' + beginCaptures: + "1": + name: "keyword.query.from.cs" + "2": + patterns: [ + { + include: "#type" + } + ] + "7": + name: "entity.name.variable.range-variable.cs" + "8": + name: "keyword.query.in.cs" + end: "(?=;|\\))" + patterns: [ + { + include: "#query-body" + } + { + include: "#expression" + } + ] + "query-body": + patterns: [ + { + include: "#let-clause" + } + { + include: "#where-clause" + } + { + include: "#join-clause" + } + { + include: "#orderby-clause" + } + { + include: "#select-clause" + } + { + include: "#group-clause" + } + ] + "let-clause": + begin: ''' + (?x) + \\b(let)\\b\\s* + (@?[_[:alpha:]][_[:alnum:]]*)\\b\\s* + (=)\\s* + ''' + beginCaptures: + "1": + name: "keyword.query.let.cs" + "2": + name: "entity.name.variable.range-variable.cs" + "3": + name: "keyword.operator.assignment.cs" + end: "(?=;|\\))" + patterns: [ + { + include: "#query-body" + } + { + include: "#expression" + } + ] + "where-clause": + begin: ''' + (?x) + \\b(where)\\b\\s* + ''' + beginCaptures: + "1": + name: "keyword.query.where.cs" + end: "(?=;|\\))" + patterns: [ + { + include: "#query-body" + } + { + include: "#expression" + } + ] + "join-clause": + begin: ''' + (?x) + \\b(join)\\b\\s* + (?<typename> + (?: + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )? 
+ \\s+(\\g<identifier>)\\b\\s* + \\b(in)\\b\\s* + ''' + beginCaptures: + "1": + name: "keyword.query.join.cs" + "2": + patterns: [ + { + include: "#type" + } + ] + "7": + name: "entity.name.variable.range-variable.cs" + "8": + name: "keyword.query.in.cs" + end: "(?=;|\\))" + patterns: [ + { + include: "#join-on" + } + { + include: "#join-equals" + } + { + include: "#join-into" + } + { + include: "#query-body" + } + { + include: "#expression" + } + ] + "join-on": + match: "\\b(on)\\b\\s*" + captures: + "1": + name: "keyword.query.on.cs" + "join-equals": + match: "\\b(equals)\\b\\s*" + captures: + "1": + name: "keyword.query.equals.cs" + "join-into": + match: ''' + (?x) + \\b(into)\\b\\s* + (@?[_[:alpha:]][_[:alnum:]]*)\\b\\s* + ''' + captures: + "1": + name: "keyword.query.into.cs" + "2": + name: "entity.name.variable.range-variable.cs" + "orderby-clause": + begin: "\\b(orderby)\\b\\s*" + beginCaptures: + "1": + name: "keyword.query.orderby.cs" + end: "(?=;|\\))" + patterns: [ + { + include: "#ordering-direction" + } + { + include: "#query-body" + } + { + include: "#expression" + } + { + include: "#punctuation-comma" + } + ] + "ordering-direction": + match: "\\b(?:(ascending)|(descending))\\b" + captures: + "1": + name: "keyword.query.ascending.cs" + "2": + name: "keyword.query.descending.cs" + "select-clause": + begin: "\\b(select)\\b\\s*" + beginCaptures: + "1": + name: "keyword.query.select.cs" + end: "(?=;|\\))" + patterns: [ + { + include: "#query-body" + } + { + include: "#expression" + } + ] + "group-clause": + begin: "\\b(group)\\b\\s*" + beginCaptures: + "1": + name: "keyword.query.group.cs" + end: "(?=;|\\))" + patterns: [ + { + include: "#group-by" + } + { + include: "#group-into" + } + { + include: "#query-body" + } + { + include: "#expression" + } + ] + "group-by": + match: "\\b(by)\\b\\s*" + captures: + "1": + name: "keyword.query.by.cs" + "group-into": + match: ''' + (?x) + \\b(into)\\b\\s* + (@?[_[:alpha:]][_[:alnum:]]*)\\b\\s* + ''' + captures: + "1": + name: "keyword.query.into.cs" + "2": + name: "entity.name.variable.range-variable.cs" + "anonymous-method-expression": + patterns: [ + { + begin: ''' + (?x) + (?:\\b(async)\\b\\s*)? + (@?[_[:alpha:]][_[:alnum:]]*)\\b\\s* + (=>) + ''' + beginCaptures: + "1": + name: "storage.modifier.cs" + "2": + name: "variable.parameter.function.cs" + "3": + name: "keyword.operator.arrow.cs" + end: "(?=\\)|;|}|,)" + patterns: [ + { + include: "#block" + } + { + include: "#ref-modifier" + } + { + include: "#expression" + } + ] + } + { + begin: ''' + (?x) + (?:\\b(async)\\b\\s*)? + (\\(.*?\\))\\s* + (=>) + ''' + beginCaptures: + "1": + name: "storage.modifier.cs" + "2": + patterns: [ + { + include: "#lambda-parameter-list" + } + ] + "3": + name: "keyword.operator.arrow.cs" + end: "(?=\\)|;|}|,)" + patterns: [ + { + include: "#block" + } + { + include: "#ref-modifier" + } + { + include: "#expression" + } + ] + } + { + begin: ''' + (?x) + (?:\\b(async)\\b\\s*)? 
+ (?:\\b(delegate)\\b\\s*) + ''' + beginCaptures: + "1": + name: "storage.modifier.cs" + "2": + name: "keyword.other.delegate.cs" + end: "(?=\\)|;|}|,)" + patterns: [ + { + include: "#parenthesized-parameter-list" + } + { + include: "#block" + } + { + include: "#expression" + } + ] + } + ] + "lambda-parameter-list": + begin: "\\(" + beginCaptures: + "0": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#comment" + } + { + include: "#attribute-section" + } + { + include: "#lambda-parameter" + } + { + include: "#punctuation-comma" + } + ] + "lambda-parameter": + match: ''' + (?x) + (?:\\b(ref|out|in)\\b)?\\s* + (?:(?<typename> + (?: + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + )\\s+)? + (\\g<identifier>)\\b\\s* + (?=[,)]) + ''' + captures: + "1": + name: "storage.modifier.cs" + "2": + patterns: [ + { + include: "#type" + } + ] + "7": + name: "variable.parameter.function.cs" + type: + name: "meta.type.cs" + patterns: [ + { + include: "#comment" + } + { + include: "#ref-modifier" + } + { + include: "#readonly-modifier" + } + { + include: "#tuple-type" + } + { + include: "#type-builtin" + } + { + include: "#type-name" + } + { + include: "#type-arguments" + } + { + include: "#type-array-suffix" + } + { + include: "#type-nullable-suffix" + } + ] + "ref-modifier": + name: "storage.modifier.cs" + match: "\\b(ref)\\b" + "readonly-modifier": + name: "storage.modifier.cs" + match: "\\b(readonly)\\b" + "tuple-type": + begin: "\\(" + beginCaptures: + "0": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#tuple-element" + } + { + include: "#punctuation-comma" + } + ] + "tuple-element": + match: ''' + (?x) + (?<typename> + (?: + (?: + (?:(?<identifier>@?[_[:alpha:]][_[:alnum:]]*)\\s*\\:\\:\\s*)? # alias-qualification + (?<nameandtypeargs> # identifier + type arguments (if any) + \\g<identifier>\\s* + (?<typeargs>\\s*<(?:[^<>]|\\g<typeargs>)+>\\s*)? + ) + (?:\\s*\\.\\s*\\g<nameandtypeargs>)* | # Are there any more names being dotted into? + (?<tuple>\\s*\\((?:[^\\(\\)]|\\g<tuple>)+\\)) + ) + (?:\\s*\\?\\s*)? # nullable suffix? + (?:\\s*\\[(?:\\s*,\\s*)*\\]\\s*)* # array suffix? + ) + ) + (?:(?<tuplename>\\g<identifier>)\\b)? 
+ ''' + captures: + "1": + patterns: [ + { + include: "#type" + } + ] + "6": + name: "entity.name.variable.tuple-element.cs" + "type-builtin": + match: "\\b(bool|byte|char|decimal|double|float|int|long|object|sbyte|short|string|uint|ulong|ushort|void|dynamic)\\b" + captures: + "1": + name: "keyword.type.cs" + "type-name": + patterns: [ + { + match: "(@?[_[:alpha:]][_[:alnum:]]*)\\s*(\\:\\:)" + captures: + "1": + name: "entity.name.type.alias.cs" + "2": + name: "punctuation.separator.coloncolon.cs" + } + { + match: "(@?[_[:alpha:]][_[:alnum:]]*)\\s*(\\.)" + captures: + "1": + name: "storage.type.cs" + "2": + name: "punctuation.accessor.cs" + } + { + match: "(\\.)\\s*(@?[_[:alpha:]][_[:alnum:]]*)" + captures: + "1": + name: "punctuation.accessor.cs" + "2": + name: "storage.type.cs" + } + { + name: "entity.name.type.cs" + match: "@?[_[:alpha:]][_[:alnum:]]*" + } + ] + "type-arguments": + begin: "<" + beginCaptures: + "0": + name: "punctuation.definition.typeparameters.begin.cs" + end: ">" + endCaptures: + "0": + name: "punctuation.definition.typeparameters.end.cs" + patterns: [ + { + include: "#comment" + } + { + include: "#type" + } + { + include: "#punctuation-comma" + } + ] + "type-array-suffix": + begin: "\\[" + beginCaptures: + "0": + name: "punctuation.squarebracket.open.cs" + end: "\\]" + endCaptures: + "0": + name: "punctuation.squarebracket.close.cs" + patterns: [ + { + include: "#punctuation-comma" + } + ] + "type-nullable-suffix": + match: "\\?" + captures: + "0": + name: "punctuation.separator.question-mark.cs" + "operator-assignment": + name: "keyword.operator.assignment.cs" + match: "(?<!=|!)(=)(?!=)" + "punctuation-comma": + name: "punctuation.separator.comma.cs" + match: "," + "punctuation-semicolon": + name: "punctuation.terminator.statement.cs" + match: ";" + "punctuation-accessor": + name: "punctuation.accessor.cs" + match: "\\." 
+ preprocessor: + name: "meta.preprocessor.cs" + begin: "^\\s*(\\#)\\s*" + beginCaptures: + "1": + name: "punctuation.separator.hash.cs" + end: "(?<=$)" + patterns: [ + { + include: "#comment" + } + { + include: "#preprocessor-define-or-undef" + } + { + include: "#preprocessor-if-or-elif" + } + { + include: "#preprocessor-else-or-endif" + } + { + include: "#preprocessor-warning-or-error" + } + { + include: "#preprocessor-region" + } + { + include: "#preprocessor-endregion" + } + { + include: "#preprocessor-load" + } + { + include: "#preprocessor-r" + } + { + include: "#preprocessor-line" + } + { + include: "#preprocessor-pragma-warning" + } + { + include: "#preprocessor-pragma-checksum" + } + ] + "preprocessor-define-or-undef": + match: "\\b(?:(define)|(undef))\\b\\s*\\b([_[:alpha:]][_[:alnum:]]*)\\b" + captures: + "1": + name: "keyword.preprocessor.define.cs" + "2": + name: "keyword.preprocessor.undef.cs" + "3": + name: "entity.name.variable.preprocessor.symbol.cs" + "preprocessor-if-or-elif": + begin: "\\b(?:(if)|(elif))\\b" + beginCaptures: + "1": + name: "keyword.preprocessor.if.cs" + "2": + name: "keyword.preprocessor.elif.cs" + end: "(?=$)" + patterns: [ + { + include: "#comment" + } + { + include: "#preprocessor-expression" + } + ] + "preprocessor-else-or-endif": + match: "\\b(?:(else)|(endif))\\b" + captures: + "1": + name: "keyword.preprocessor.else.cs" + "2": + name: "keyword.preprocessor.endif.cs" + "preprocessor-warning-or-error": + match: "\\b(?:(warning)|(error))\\b\\s*(.*)(?=$)" + captures: + "1": + name: "keyword.preprocessor.warning.cs" + "2": + name: "keyword.preprocessor.error.cs" + "3": + name: "string.unquoted.preprocessor.message.cs" + "preprocessor-load": + begin: "\\b(load)\\b" + beginCaptures: + "1": + name: "keyword.preprocessor.load.cs" + end: "(?=$)" + patterns: [ + { + match: "\\\"[^\"]*\\\"" + captures: + "0": + name: "string.quoted.double.cs" + } + ] + "preprocessor-r": + begin: "\\b(r)\\b" + beginCaptures: + "1": + name: "keyword.preprocessor.r.cs" + end: "(?=$)" + patterns: [ + { + match: "\\\"[^\"]*\\\"" + captures: + "0": + name: "string.quoted.double.cs" + } + ] + "preprocessor-region": + match: "\\b(region)\\b\\s*(.*)(?=$)" + captures: + "1": + name: "keyword.preprocessor.region.cs" + "2": + name: "string.unquoted.preprocessor.message.cs" + "preprocessor-endregion": + match: "\\b(endregion)\\b" + captures: + "1": + name: "keyword.preprocessor.endregion.cs" + "preprocessor-line": + begin: "\\b(line)\\b" + beginCaptures: + "1": + name: "keyword.preprocessor.line.cs" + end: "(?=$)" + patterns: [ + { + match: "\\b(?:(default|hidden))" + captures: + "1": + name: "keyword.preprocessor.default.cs" + "2": + name: "keyword.preprocessor.hidden.cs" + } + { + match: "[0-9]+" + captures: + "0": + name: "constant.numeric.decimal.cs" + } + { + match: "\\\"[^\"]*\\\"" + captures: + "0": + name: "string.quoted.double.cs" + } + ] + "preprocessor-pragma-warning": + match: "\\b(pragma)\\b\\s*\\b(warning)\\b\\s*\\b(?:(disable)|(restore))\\b(\\s*[0-9]+(?:\\s*,\\s*[0-9]+)?)?" 
+ captures: + "1": + name: "keyword.preprocessor.pragma.cs" + "2": + name: "keyword.preprocessor.warning.cs" + "3": + name: "keyword.preprocessor.disable.cs" + "4": + name: "keyword.preprocessor.restore.cs" + "5": + patterns: [ + { + match: "[0-9]+" + captures: + "0": + name: "constant.numeric.decimal.cs" + } + { + include: "#punctuation-comma" + } + ] + "preprocessor-pragma-checksum": + match: "\\b(pragma)\\b\\s*\\b(checksum)\\b\\s*(\\\"[^\"]*\\\")\\s*(\\\"[^\"]*\\\")\\s*(\\\"[^\"]*\\\")" + captures: + "1": + name: "keyword.preprocessor.pragma.cs" + "2": + name: "keyword.preprocessor.checksum.cs" + "3": + name: "string.quoted.double.cs" + "4": + name: "string.quoted.double.cs" + "5": + name: "string.quoted.double.cs" + "preprocessor-expression": + patterns: [ + { + begin: "\\(" + beginCaptures: + "0": + name: "punctuation.parenthesis.open.cs" + end: "\\)" + endCaptures: + "0": + name: "punctuation.parenthesis.close.cs" + patterns: [ + { + include: "#preprocessor-expression" + } + ] + } + { + match: "\\b(?:(true)|(false)|([_[:alpha:]][_[:alnum:]]*))\\b" + captures: + "1": + name: "constant.language.boolean.true.cs" + "2": + name: "constant.language.boolean.false.cs" + "3": + name: "entity.name.variable.preprocessor.symbol.cs" + } + { + match: "(==|!=)|(\\!|&&|\\|\\|)" + captures: + "1": + name: "keyword.operator.comparison.cs" + "2": + name: "keyword.operator.logical.cs" + } + ] + comment: + patterns: [ + { + name: "comment.block.cs" + begin: "/\\*" + beginCaptures: + "0": + name: "punctuation.definition.comment.cs" + end: "\\*/" + endCaptures: + "0": + name: "punctuation.definition.comment.cs" + } + { + begin: "(^\\s+)?(?=//)" + beginCaptures: + "1": + name: "punctuation.whitespace.comment.leading.cs" + end: "(?=$)" + patterns: [ + { + name: "comment.block.documentation.cs" + begin: "(?<!/)///(?!/)" + beginCaptures: + "0": + name: "punctuation.definition.comment.cs" + end: "(?=$)" + patterns: [ + { + include: "#xml-doc-comment" + } + ] + } + { + name: "comment.line.double-slash.cs" + begin: "(?<!/)//(?:(?!/)|(?=//))" + beginCaptures: + "0": + name: "punctuation.definition.comment.cs" + end: "(?=$)" + } + ] + } + ] + "xml-doc-comment": + patterns: [ + { + include: "#xml-comment" + } + { + include: "#xml-character-entity" + } + { + include: "#xml-cdata" + } + { + include: "#xml-tag" + } + ] + "xml-tag": + name: "meta.tag.cs" + begin: ''' + (?x) + (</?) + ( + (?: + ([-_[:alnum:]]+) + (:) + )? + ([-_[:alnum:]]+) + ) + ''' + beginCaptures: + "1": + name: "punctuation.definition.tag.cs" + "2": + name: "entity.name.tag.cs" + "3": + name: "entity.name.tag.namespace.cs" + "4": + name: "punctuation.separator.colon.cs" + "5": + name: "entity.name.tag.localname.cs" + end: "(/?>)" + endCaptures: + "1": + name: "punctuation.definition.tag.cs" + patterns: [ + { + include: "#xml-attribute" + } + ] + "xml-attribute": + patterns: [ + { + match: ''' + (?x) + (?:^|\\s+) + ( + (?: + ([-_[:alnum:]]+) + (:) + )? 
+ ([-_[:alnum:]]+) + ) + (=) + ''' + captures: + "1": + name: "entity.other.attribute-name.cs" + "2": + name: "entity.other.attribute-name.namespace.cs" + "3": + name: "punctuation.separator.colon.cs" + "4": + name: "entity.other.attribute-name.localname.cs" + "5": + name: "punctuation.separator.equals.cs" + } + { + include: "#xml-string" + } + ] + "xml-cdata": + name: "string.unquoted.cdata.cs" + begin: "<!\\[CDATA\\[" + beginCaptures: + "0": + name: "punctuation.definition.string.begin.cs" + end: "\\]\\]>" + endCaptures: + "0": + name: "punctuation.definition.string.end.cs" + "xml-string": + patterns: [ + { + name: "string.quoted.single.cs" + begin: "\\'" + beginCaptures: + "0": + name: "punctuation.definition.string.begin.cs" + end: "\\'" + endCaptures: + "0": + name: "punctuation.definition.string.end.cs" + patterns: [ + { + include: "#xml-character-entity" + } + ] + } + { + name: "string.quoted.double.cs" + begin: "\\\"" + beginCaptures: + "0": + name: "punctuation.definition.string.begin.cs" + end: "\\\"" + endCaptures: + "0": + name: "punctuation.definition.string.end.cs" + patterns: [ + { + include: "#xml-character-entity" + } + ] + } + ] + "xml-character-entity": + patterns: [ + { + name: "constant.character.entity.cs" + match: ''' + (?x) + (&) + ( + (?:[[:alpha:]:_][[:alnum:]:_.-]*)| + (?:\\#[[:digit:]]+)| + (?:\\#x[[:xdigit:]]+) + ) + (;) + ''' + captures: + "1": + name: "punctuation.definition.constant.cs" + "3": + name: "punctuation.definition.constant.cs" + } + { + name: "invalid.illegal.bad-ampersand.cs" + match: "&" + } + ] + "xml-comment": + name: "comment.block.cs" + begin: "<!--" + beginCaptures: + "0": + name: "punctuation.definition.comment.cs" + end: "-->" + endCaptures: + "0": + name: "punctuation.definition.comment.cs" \ No newline at end of file diff --git a/packages/language-csharp/grammars/csx.cson b/packages/language-csharp/grammars/csx.cson new file mode 100644 index 000000000..f7e3de7c1 --- /dev/null +++ b/packages/language-csharp/grammars/csx.cson @@ -0,0 +1,14 @@ +scopeName: "source.csx" +name: "C# Script File" +fileTypes: [ + "csx" +] +patterns: [ + { + include: "source.cs" + } + { + match: "^#(load|r)" + name: "preprocessor.source.csx" + } +] diff --git a/packages/language-csharp/package.json b/packages/language-csharp/package.json new file mode 100644 index 000000000..800cd7c83 --- /dev/null +++ b/packages/language-csharp/package.json @@ -0,0 +1,17 @@ +{ + "name": "language-csharp", + "version": "1.1.0", + "private": true, + "description": "C# language support for Atom", + "repository": "https://github.com/atom/language-csharp", + "keywords": [ + "C#", + "csharp", + ".Net" + ], + "license": "MIT", + "engines": { + "atom": ">0.50.0" + }, + "dependencies": {} +} diff --git a/packages/language-csharp/scripts/converter.py b/packages/language-csharp/scripts/converter.py new file mode 100644 index 000000000..2e4542cf8 --- /dev/null +++ b/packages/language-csharp/scripts/converter.py @@ -0,0 +1,19 @@ +# Removes '-' characters from named groups to make +# Oniguruma expressions compatible with PCRE engine. 
+import re + +def read(filename): + with open(filename, 'rt', encoding='utf8') as file: + return file.read() + +def write(filename, content): + with open(filename, 'w', encoding='utf8') as file: + file.write(content) + +def convert(string): + result = re.sub(r'\?<([a-zA-Z-_]*)>', lambda x: x.group().replace('-', ''), string) + return re.sub(r'\\\\g<([a-zA-Z-]*)>', lambda x: x.group().replace('-', ''), result) + +content = read('../grammars/csharp.cson') +updated = convert(content) +write('../grammars/csharp.cson', updated) diff --git a/packages/language-csharp/settings/language-csharp.cson b/packages/language-csharp/settings/language-csharp.cson new file mode 100644 index 000000000..970f58082 --- /dev/null +++ b/packages/language-csharp/settings/language-csharp.cson @@ -0,0 +1,5 @@ +'.source.cs': + 'editor': + 'commentStart': '// ' + 'increaseIndentPattern': '(?x)\n\t\t^ .* \\{ [^}"\']* $\n\t| ^ \\s* \\{ \\} $\n\t' + 'decreaseIndentPattern': '(?x)\n\t\t^ (.*\\*/)? \\s* \\} ( [^}{"\']* \\{ | \\s* while \\s* \\( .* )? [;\\s]* (//.*|/\\*.*\\*/\\s*)? $\n\t' diff --git a/packages/language-csharp/snippets/language-csharp.cson b/packages/language-csharp/snippets/language-csharp.cson new file mode 100644 index 000000000..9f49417cf --- /dev/null +++ b/packages/language-csharp/snippets/language-csharp.cson @@ -0,0 +1,142 @@ +'.source.cs': + 'Abstract': + 'prefix': 'ab' + 'body': 'abstract ' + 'Array': + 'prefix': 'arr' + 'body': '${0:DataType}[] ${1:VariableName} = {};' + 'Async Task': + 'prefix': 'at' + 'body': 'async Task<${0:T}> ${1:MethodName}($2) {\n\t$3\n}' + 'Async Void': + 'prefix': 'av' + 'body': 'async void ${0:MethodName}($1) {\n\t$2\n}' + 'Await': + 'prefix': 'aw' + 'body': 'await ' + 'Break': + 'prefix': 'br' + 'body': 'break;\n' + 'Case': + 'prefix': 'cs' + 'body': 'case ${1:Condition}:\n\t$2\n$0' + 'Catch': + 'prefix': 'ca' + 'body': 'catch (${1:Exception} ${2:e}) {\n\t$0\n}' + 'Class': + 'prefix': 'cl' + 'body': 'class $1\n{\n\t$0\n}' + 'Constant String': + 'prefix': 'cos' + 'body': 'public const string ${1:Var} = $2;$0' + 'Constant': + 'prefix': 'co' + 'body': 'public const ${1:string} ${2:Var} = $3;$0' + 'Default': + 'prefix': 'de' + 'body': 'default:\n\t$0' + 'Do While': + 'prefix': 'do' + 'body': 'do {\n\t$0\n} while (${1:Condition});' + 'Else If': + 'prefix': 'elif' + 'body': 'else if (${1:Condition}) {\n\t$0\n}' + 'Else': + 'prefix': 'el' + 'body': 'else {\n\t$0\n}' + 'Enumeration': + 'prefix': 'enum' + 'body': 'enum $1\n{\n\t$0\n}' + 'Finally': + 'prefix': 'fy' + 'body': 'finally {\n\t$0\n}' + 'Fixed': + 'prefix': 'fi' + 'body': 'fixed (${1:Expression}) {\n\t$0\n}' + 'For': + 'prefix': 'for' + 'body': 'for (${1:Initializer}; ${2:Condition}; ${3:Update}) {\n\t$0\n}' + 'For Each': + 'prefix': 'fore' + 'body': 'foreach (${1:Type} in ${2:Collection}) {\n\t$0\n}' + 'If ': + 'prefix': 'if' + 'body': 'if (${1:Condition}) {\n\t$0\n}' + 'Interface': + 'prefix': 'in' + 'body': 'interface $1\n{\n\t$0\n}' + 'Method (Main)': + 'prefix': 'main' + 'body': '/// <summary>\n/// The main entry point for the application\n/// </summary>\n[STAThread]\npublic static void Main(string[] args)\n{\n\t$0\n}' + 'Method': + 'prefix': 'm' + 'body': '${1:void} ${2:Method}($3)\n{\n\t$0\n}' + 'Namespace ': + 'prefix': 'ns' + 'body': 'namespace ${1:NamespaceName}\n{\n\t$0\n}' + 'Override': + 'prefix': 'over' + 'body': 'override ' + 'Parse': + 'prefix': 'par' + 'body': '${0:DataType}.Parse(${1:VariableName});' + 'Private': + 'prefix': 'pr' + 'body': 'private ' + 'Property': + 'prefix': 'prop' + 'body': 'public 
${1:string} ${2:PropertyName} { get; set; }' + 'Protected': + 'prefix': 'po' + 'body': 'protected ' + 'Public ': + 'prefix': 'pu' + 'body': 'public ' + 'ReadLine': + 'prefix': 'rl' + 'body': 'Console.ReadLine();' + 'Region': + 'prefix': 'reg' + 'body': '#region ${1:Region Name}\n\n$0\n\n#endregion\n' + 'Return': + 'prefix': 're' + 'body': 'return ' + 'Sealed': + 'prefix': 'se' + 'body': 'sealed ' + 'Static': + 'prefix': 'st' + 'body': 'static ' + 'Struct': + 'prefix': 'su' + 'body': 'struct $1\n{\n\t$0\n}' + 'Switch': + 'prefix': 'sw' + 'body': 'switch (${1:Expression}) {\n\t$0\n}' + 'Throw New': + 'prefix': 'tn' + 'body': 'throw new $0' + 'Throw': + 'prefix': 'th' + 'body': 'throw $0' + 'Try': + 'prefix': 'tr' + 'body': 'try {\n\t$0\n}' + 'Using': + 'prefix': 'us' + 'body': 'using ${1:System};$0' + 'Variable': + 'prefix': 'v' + 'body': '${1:string} ${2:var}${3: = ${0:null}};' + 'Virtual': + 'prefix': 'virt' + 'body': 'virtual ' + 'While': + 'prefix': 'wh' + 'body': 'while (${1:Condition}) {\n\t$0\n}' + 'Write': + 'prefix': 'w' + 'body': 'Console.Write($1);$0' + 'WriteLine': + 'prefix': 'wl' + 'body': 'Console.WriteLine($1);$0' diff --git a/packages/language-csharp/spec/grammar-spec.coffee b/packages/language-csharp/spec/grammar-spec.coffee new file mode 100644 index 000000000..34fcbfac9 --- /dev/null +++ b/packages/language-csharp/spec/grammar-spec.coffee @@ -0,0 +1,17 @@ +describe "Language C# package", -> + + beforeEach -> + waitsForPromise -> + atom.packages.activatePackage("language-csharp") + + describe "C# Script grammar", -> + it "parses the grammar", -> + grammar = atom.grammars.grammarForScopeName("source.csx") + expect(grammar).toBeDefined() + expect(grammar.scopeName).toBe "source.csx" + + describe "C# Cake grammar", -> + it "parses the grammar", -> + grammar = atom.grammars.grammarForScopeName("source.cake") + expect(grammar).toBeDefined() + expect(grammar.scopeName).toBe "source.cake" diff --git a/packages/language-css/.github/no-response.yml b/packages/language-css/.github/no-response.yml new file mode 100644 index 000000000..1c8799d13 --- /dev/null +++ b/packages/language-css/.github/no-response.yml @@ -0,0 +1,15 @@ +# Configuration for probot-no-response - https://github.com/probot/no-response + +# Number of days of inactivity before an issue is closed for lack of response +daysUntilClose: 28 + +# Label requiring a response +responseRequiredLabel: more-information-needed + +# Comment to post when closing an issue for lack of response. Set to `false` to disable. +closeComment: > + This issue has been automatically closed because there has been no response + to our request for more information from the original author. With only the + information that is currently in the issue, we don't have enough information + to take action. Please reach out if you have or find the answers we need so + that we can investigate further. 
diff --git a/packages/language-css/.github/workflows/main.yml b/packages/language-css/.github/workflows/main.yml new file mode 100644 index 000000000..1fab392a0 --- /dev/null +++ b/packages/language-css/.github/workflows/main.yml @@ -0,0 +1,28 @@ +name: CI + +on: [push] + +env: + CI: true + +jobs: + Test: + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + channel: [stable, beta] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v1 + - uses: UziTech/action-setup-atom@v2 + with: + version: ${{ matrix.channel }} + - name: Install windows-build-tools + if: ${{ matrix.os == 'windows-latest' }} + run: | + npm install node-gyp@latest + npm config set msvs_version 2019 + - name: Install dependencies + run: npm i + - name: Run tests + run: atom --test spec diff --git a/packages/language-css/.gitignore b/packages/language-css/.gitignore new file mode 100644 index 000000000..3c3629e64 --- /dev/null +++ b/packages/language-css/.gitignore @@ -0,0 +1 @@ +node_modules diff --git a/packages/language-css/CONTRIBUTING.md b/packages/language-css/CONTRIBUTING.md new file mode 100644 index 000000000..0fd0ad696 --- /dev/null +++ b/packages/language-css/CONTRIBUTING.md @@ -0,0 +1 @@ +See the [Atom contributing guide](https://github.com/atom/atom/blob/master/CONTRIBUTING.md) diff --git a/packages/language-css/ISSUE_TEMPLATE.md b/packages/language-css/ISSUE_TEMPLATE.md new file mode 100644 index 000000000..b60bb86c9 --- /dev/null +++ b/packages/language-css/ISSUE_TEMPLATE.md @@ -0,0 +1,40 @@ +<!-- + +Have you read Atom's Code of Conduct? By filing an Issue, you are expected to comply with it, including treating everyone with respect: https://github.com/atom/atom/blob/master/CODE_OF_CONDUCT.md + +Do you want to ask a question? Are you looking for support? The Atom message board is the best place for getting support: https://discuss.atom.io + +--> + +### Prerequisites + +* [ ] Put an X between the brackets on this line if you have done all of the following: + * Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode + * Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/ + * Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq + * Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom + * Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages + +### Description + +[Description of the issue] + +### Steps to Reproduce + +1. [First Step] +2. [Second Step] +3. [and so on...] + +**Expected behavior:** [What you expect to happen] + +**Actual behavior:** [What actually happens] + +**Reproduces how often:** [What percentage of the time does it reproduce?] + +### Versions + +You can get this information from copy and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running. + +### Additional Information + +Any additional information, configuration or data that might be necessary to reproduce the issue. diff --git a/packages/language-css/LICENSE.md b/packages/language-css/LICENSE.md new file mode 100644 index 000000000..04e53c81a --- /dev/null +++ b/packages/language-css/LICENSE.md @@ -0,0 +1,31 @@ +Copyright (c) 2014 GitHub Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +-------------------------------------------------------------------- + +This package was derived from a TextMate bundle located at +https://github.com/textmate/css.tmbundle and distributed under the following +license, located in `README.mdown`: + +Permission to copy, use, modify, sell and distribute this +software is granted. This software is provided "as is" without +express or implied warranty, and with no claim as to its +suitability for any purpose. diff --git a/packages/language-css/PULL_REQUEST_TEMPLATE.md b/packages/language-css/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000..cdaa94a86 --- /dev/null +++ b/packages/language-css/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,28 @@ +### Requirements + +* Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion. +* All new code requires tests to ensure against regressions + +### Description of the Change + +<!-- + +We must be able to understand the design of your change from this description. If we can't get a good idea of what the code will be doing from the description here, the pull request may be closed at the maintainers' discretion. Keep in mind that the maintainer reviewing this PR may not be familiar with or have worked with the code here recently, so please walk us through the concepts. + +--> + +### Alternate Designs + +<!-- Explain what other alternates were considered and why the proposed version was selected --> + +### Benefits + +<!-- What benefits will be realized by the code change? --> + +### Possible Drawbacks + +<!-- What are the possible side-effects or negative impacts of the code change? --> + +### Applicable Issues + +<!-- Enter any applicable Issues here --> diff --git a/packages/language-css/README.md b/packages/language-css/README.md new file mode 100644 index 000000000..65aeec125 --- /dev/null +++ b/packages/language-css/README.md @@ -0,0 +1,10 @@ +# CSS language support in Atom +![CI Status](https://github.com/atom/language-css/actions/workflows/main.yml/badge.svg) + +Adds syntax highlighting, completions, and snippets to CSS files in Atom. + +Originally [converted](http://flight-manual.atom.io/hacking-atom/sections/converting-from-textmate) +from the [CSS TextMate bundle](https://github.com/textmate/css.tmbundle). + +Contributions are greatly appreciated. Please fork this repository and open a +pull request to add snippets, make grammar tweaks, etc. 
diff --git a/packages/language-css/coffeelint.json b/packages/language-css/coffeelint.json new file mode 100644 index 000000000..a5dd715e3 --- /dev/null +++ b/packages/language-css/coffeelint.json @@ -0,0 +1,37 @@ +{ + "max_line_length": { + "level": "ignore" + }, + "no_empty_param_list": { + "level": "error" + }, + "arrow_spacing": { + "level": "error" + }, + "no_interpolation_in_single_quotes": { + "level": "error" + }, + "no_debugger": { + "level": "error" + }, + "prefer_english_operator": { + "level": "error" + }, + "colon_assignment_spacing": { + "spacing": { + "left": 0, + "right": 1 + }, + "level": "error" + }, + "braces_spacing": { + "spaces": 0, + "level": "error" + }, + "spacing_after_comma": { + "level": "error" + }, + "no_stand_alone_at": { + "level": "error" + } +} diff --git a/packages/language-css/grammars/css.cson b/packages/language-css/grammars/css.cson new file mode 100644 index 000000000..31e04a600 --- /dev/null +++ b/packages/language-css/grammars/css.cson @@ -0,0 +1,2135 @@ +'scopeName': 'source.css' +'name': 'CSS' +'fileTypes': [ + 'css' + 'css.erb' +] +'firstLineMatch': '''(?xi) + # Emacs modeline + -\\*-(?:\\s*(?=[^:;\\s]+\\s*-\\*-)|(?:.*?[;\\s]|(?<=-\\*-))mode\\s*:\\s*) + css + (?=[\\s;]|(?<![-*])-\\*-).*?-\\*- + | + # Vim modeline + (?:(?:\\s|^)vi(?:m[<=>]?\\d+|m)?|\\sex)(?=:(?=\\s*set?\\s[^\\n:]+:)|:(?!\\s*set?\\s))(?:(?:\\s|\\s*:\\s*)\\w*(?:\\s*=(?:[^\\n\\\\\\s]|\\\\.)*)?)*[\\s:](?:filetype|ft|syntax)\\s*= + css + (?=\\s|:|$) +''' +'patterns': [ + { + 'include': '#comment-block' + } + { + 'include': '#escapes' + } + { + 'include': '#combinators' + } + { + 'include': '#selector' + } + { + 'include': '#at-rules' + } + { + 'include': '#rule-list' + } +] +'repository': + 'at-rules': + 'patterns': [ + { + # @charset, with possible preceding BOM sequence + 'begin': '\\A(?:\\xEF\\xBB\\xBF)?(?i:(?=\\s*@charset\\b))' + 'end': ';|(?=$)' + 'endCaptures': + '0': + 'name': 'punctuation.terminator.rule.css' + 'name': 'meta.at-rule.charset.css' + 'patterns': [ + { + 'captures': + '1': + 'name': 'invalid.illegal.not-lowercase.charset.css' + '2': + 'name': 'invalid.illegal.leading-whitespace.charset.css' + '3': + 'name': 'invalid.illegal.no-whitespace.charset.css' + '4': + 'name': 'invalid.illegal.whitespace.charset.css' + '5': + 'name': 'invalid.illegal.not-double-quoted.charset.css' + '6': + 'name': 'invalid.illegal.unclosed-string.charset.css' + '7': + 'name': 'invalid.illegal.unexpected-characters.charset.css' + 'match': '''(?x) # Possible errors: + \\G + ((?!@charset)@\\w+) # Not lowercase (@charset is case-sensitive) + | + \\G(\\s+) # Preceding whitespace + | + (@charset\\S[^;]*) # No whitespace after @charset + | + (?<=@charset) # Before quoted charset name + (\\x20{2,}|\\t+) # More than one space used, or a tab + | + (?<=@charset\\x20) # Beginning of charset name + ([^";]+) # Not double-quoted + | + ("[^"]+$) # Unclosed quote + | + (?<=") # After charset name + ([^;]+) # Unexpected junk instead of semicolon + ''' + } + { + 'captures': + '1': + 'name': 'keyword.control.at-rule.charset.css' + '2': + 'name': 'punctuation.definition.keyword.css' + 'match': '((@)charset)(?=\\s)' + } + { + 'begin': '"' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.css' + 'end': '"|$' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.css' + 'name': 'string.quoted.double.css' + 'patterns': [ + { + 'begin': '(?:\\G|^)(?=(?:[^"])+$)' + 'end': '$' + 'name': 'invalid.illegal.unclosed.string.css' + } + ] + } + ] + } + { + # @import + 'begin': 
'(?i)((@)import)(?:\\s+|$|(?=[\'"]|/\\*))' + 'beginCaptures': + '1': + 'name': 'keyword.control.at-rule.import.css' + '2': + 'name': 'punctuation.definition.keyword.css' + 'end': ';' + 'endCaptures': + '0': + 'name': 'punctuation.terminator.rule.css' + 'name': 'meta.at-rule.import.css' + 'patterns': [ + { + 'begin': '\\G\\s*(?=/\\*)' + 'end': '(?<=\\*/)\\s*' + 'patterns': [ + { + 'include': '#comment-block' + } + ] + } + { + 'include': '#string' + } + { + 'include': '#url' + } + { + 'include': '#media-query-list' + } + ] + } + { + # @font-face + 'begin': '(?i)((@)font-face)(?=\\s*|{|/\\*|$)' + 'beginCaptures': + '1': + 'name': 'keyword.control.at-rule.font-face.css' + '2': + 'name': 'punctuation.definition.keyword.css' + 'end': '(?!\\G)' + 'name': 'meta.at-rule.font-face.css' + 'patterns': [ + { + 'include': '#comment-block' + } + { + 'include': '#escapes' + } + { + 'include': '#rule-list' + } + ] + } + { + # @page + 'begin': '(?i)(@)page(?=[\\s:{]|/\\*|$)' + 'captures': + '0': + 'name': 'keyword.control.at-rule.page.css' + '1': + 'name': 'punctuation.definition.keyword.css' + 'end': '(?=\\s*($|[:{;]))' + 'name': 'meta.at-rule.page.css' + 'patterns': [ + { + 'include': '#rule-list' + } + ] + } + { + # @media + 'begin': '(?i)(?=@media(\\s|\\(|/\\*|$))' + 'end': '(?<=})(?!\\G)' + 'patterns': [ + { + 'begin': '(?i)\\G(@)media' + 'beginCaptures': + '0': + 'name': 'keyword.control.at-rule.media.css' + '1': + 'name': 'punctuation.definition.keyword.css' + 'end': '(?=\\s*[{;])' + 'name': 'meta.at-rule.media.header.css' + 'patterns': [ + { + 'include': '#media-query-list' + } + ] + } + { + 'begin': '{' + 'beginCaptures': + '0': + 'name': 'punctuation.section.media.begin.bracket.curly.css' + 'end': '}' + 'endCaptures': + '0': + 'name': 'punctuation.section.media.end.bracket.curly.css' + 'name': 'meta.at-rule.media.body.css' + 'patterns': [ + { + 'include': '$self' + } + ] + } + ] + } + { + # @counter-style + 'begin': '(?i)(?=@counter-style([\\s\'"{;]|/\\*|$))' + 'end': '(?<=})(?!\\G)' + 'patterns': [ + { + 'begin': '(?i)\\G(@)counter-style' + 'beginCaptures': + '0': + 'name': 'keyword.control.at-rule.counter-style.css' + '1': + 'name': 'punctuation.definition.keyword.css' + 'end': '(?=\\s*{)' + 'name': 'meta.at-rule.counter-style.header.css' + 'patterns': [ + { + 'include': '#comment-block' + } + { + 'include': '#escapes' + } + { + 'captures': + '0': + 'patterns': [ + { + 'include': '#escapes' + } + ] + 'match': '''(?x) + (?:[-a-zA-Z_] | [^\\x00-\\x7F]) # First letter + (?:[-a-zA-Z0-9_] | [^\\x00-\\x7F] # Remainder of identifier + |\\\\(?:[0-9a-fA-F]{1,6}|.) 
+ )* + ''' + 'name': 'variable.parameter.style-name.css' + } + ] + } + { + 'begin': '{' + 'beginCaptures': + '0': + 'name': 'punctuation.section.property-list.begin.bracket.curly.css' + 'end': '}' + 'endCaptures': + '0': + 'name': 'punctuation.section.property-list.end.bracket.curly.css' + 'name': 'meta.at-rule.counter-style.body.css' + 'patterns': [ + { + 'include': '#comment-block' + } + { + 'include': '#escapes' + } + { + 'include': '#rule-list-innards' + } + ] + } + ] + } + { + # @document + 'begin': '(?i)(?=@document([\\s\'"{;]|/\\*|$))' + 'end': '(?<=})(?!\\G)' + 'patterns': [ + { + 'begin': '(?i)\\G(@)document' + 'beginCaptures': + '0': + 'name': 'keyword.control.at-rule.document.css' + '1': + 'name': 'punctuation.definition.keyword.css' + 'end': '(?=\\s*[{;])' + 'name': 'meta.at-rule.document.header.css' + 'patterns': [ + { + 'begin': '(?i)(?<![\\w-])(url-prefix|domain|regexp)(\\()' + 'beginCaptures': + '1': + 'name': 'support.function.document-rule.css' + '2': + 'name': 'punctuation.section.function.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.function.end.bracket.round.css' + 'name': 'meta.function.document-rule.css' + 'patterns': [ + { + 'include': '#string' + } + { + 'include': '#comment-block' + } + { + 'include': '#escapes' + } + { + 'match': '[^\'")\\s]+' + 'name': 'variable.parameter.document-rule.css' + } + ] + } + { + 'include': '#url' + } + { + 'include': '#commas' + } + { + 'include': '#comment-block' + } + { + 'include': '#escapes' + } + ] + } + { + 'begin': '{' + 'beginCaptures': + '0': + 'name': 'punctuation.section.document.begin.bracket.curly.css' + 'end': '}' + 'endCaptures': + '0': + 'name': 'punctuation.section.document.end.bracket.curly.css' + 'name': 'meta.at-rule.document.body.css' + 'patterns': [ + { + 'include': '$self' + } + ] + } + ] + } + { + # @keyframes + 'begin': '(?i)(?=@(?:-(?:webkit|moz|o|ms)-)?keyframes([\\s\'"{;]|/\\*|$))' + 'end': '(?<=})(?!\\G)' + 'patterns': [ + { + 'begin': '(?i)\\G(@)(?:-(?:webkit|moz|o|ms)-)?keyframes' + 'beginCaptures': + '0': + 'name': 'keyword.control.at-rule.keyframes.css' + '1': + 'name': 'punctuation.definition.keyword.css' + 'end': '(?=\\s*{)' + 'name': 'meta.at-rule.keyframes.header.css' + 'patterns': [ + { + 'include': '#comment-block' + } + { + 'include': '#escapes' + } + { + 'captures': + '0': + 'patterns': [ + { + 'include': '#escapes' + } + ] + 'match': '''(?x) + (?:[-a-zA-Z_] | [^\\x00-\\x7F]) # First letter + (?:[-a-zA-Z0-9_] | [^\\x00-\\x7F] # Remainder of identifier + |\\\\(?:[0-9a-fA-F]{1,6}|.) 
+ )* + ''' + 'name': 'variable.parameter.keyframe-list.css' + } + ] + } + { + 'begin': '{' + 'beginCaptures': + '0': + 'name': 'punctuation.section.keyframes.begin.bracket.curly.css' + 'end': '}' + 'endCaptures': + '0': + 'name': 'punctuation.section.keyframes.end.bracket.curly.css' + 'name': 'meta.at-rule.keyframes.body.css' + 'patterns': [ + { + 'include': '#comment-block' + } + { + 'include': '#escapes' + } + { + 'captures': + '1': + 'name': 'entity.other.keyframe-offset.css' + '2': + 'name': 'entity.other.keyframe-offset.percentage.css' + 'match': '''(?xi) + (?<![\\w-]) (from|to) (?![\\w-]) # Keywords for 0% | 100% + | + ([-+]?(?:\\d+(?:\\.\\d+)?|\\.\\d+)%) # Percentile value + ''' + } + { + 'include': '#rule-list' + } + ] + } + ] + } + { + # @supports + 'begin': '(?i)(?=@supports(\\s|\\(|/\\*|$))' + 'end': '(?<=})(?!\\G)|(?=;)' + 'patterns': [ + { + 'begin': '(?i)\\G(@)supports' + 'beginCaptures': + '0': + 'name': 'keyword.control.at-rule.supports.css' + '1': + 'name': 'punctuation.definition.keyword.css' + 'end': '(?=\\s*[{;])' + 'name': 'meta.at-rule.supports.header.css' + 'patterns': [ + { + 'include': '#feature-query-operators' + } + { + 'include': '#feature-query' + } + { + 'include': '#comment-block' + } + { + 'include': '#escapes' + } + ] + } + { + 'begin': '{' + 'beginCaptures': + '0': + 'name': 'punctuation.section.supports.begin.bracket.curly.css' + 'end': '}' + 'endCaptures': + '0': + 'name': 'punctuation.section.supports.end.bracket.curly.css' + 'name': 'meta.at-rule.supports.body.css' + 'patterns': [ + { + 'include': '$self' + } + ] + } + ] + } + { + # @viewport + 'begin': '(?i)((@)(-(ms|o)-)?viewport)(?=[\\s\'"{;]|/\\*|$)' + 'beginCaptures': + '1': + 'name': 'keyword.control.at-rule.viewport.css' + '2': + 'name': 'punctuation.definition.keyword.css' + 'end': '(?=\\s*[@{;])' + 'name': 'meta.at-rule.viewport.css' + 'patterns': [ + { + 'include': '#comment-block' + } + { + 'include': '#escapes' + } + ] + } + { + # @font-feature-values + 'begin': '(?i)((@)font-feature-values)(?=[\\s\'"{;]|/\\*|$)\\s*' + 'beginCaptures': + '1': + 'name': 'keyword.control.at-rule.font-feature-values.css' + '2': + 'name': 'punctuation.definition.keyword.css' + 'contentName': 'variable.parameter.font-name.css' + 'end': '(?=\\s*[@{;])' + 'name': 'meta.at-rule.font-features.css' + 'patterns': [ + { + 'include': '#comment-block' + } + { + 'include': '#escapes' + } + ] + } + { + # @annotation, @character-variant, @ornaments, @styleset, @stylistic, @swash + 'include': '#font-features' + } + { + # @namespace + 'begin': '(?i)((@)namespace)(?=[\\s\'";]|/\\*|$)' + 'beginCaptures': + '1': + 'name': 'keyword.control.at-rule.namespace.css' + '2': + 'name': 'punctuation.definition.keyword.css' + 'end': ';|(?=[@{])' + 'endCaptures': + '0': + 'name': 'punctuation.terminator.rule.css' + 'name': 'meta.at-rule.namespace.css' + 'patterns': [ + { + 'include': '#url' + } + { + 'captures': + '1': + 'patterns': [ + { + 'include': '#comment-block' + } + ] + '2': + 'name': 'entity.name.function.namespace-prefix.css' + 'patterns': [ + { + 'include': '#escapes' + } + ] + 'match': '''(?xi) + (?:\\G|^|(?<=\\s)) + (?= + (?<=\\s|^) # Starts with whitespace + (?:[-a-zA-Z_]|[^\\x00-\\x7F]) # Then a valid identifier character + | + \\s* # Possible adjoining whitespace + /\\*(?:[^*]|\\*[^/])*\\*/ # Injected comment + ) + (.*?) # Grouped to embed #comment-block + ( + (?:[-a-zA-Z_] | [^\\x00-\\x7F]) # First letter + (?:[-a-zA-Z0-9_] | [^\\x00-\\x7F] # Remainder of identifier + |\\\\(?:[0-9a-fA-F]{1,6}|.) 
+ )* + ) + ''' + } + { + 'include': '#comment-block' + } + { + 'include': '#escapes' + } + { + 'include': '#string' + } + ] + }, + { + # Single line @custom-at-rule terminated with `;`, such as + # @my-rule foo bar; + 'begin': '(?i)(?=@[\\w-]+[^;]+;\s*$)' + 'end': '(?<=;)(?!\\G)' + 'patterns': [ + { + 'begin': '(?i)\\G(@)[\\w-]+' + 'beginCaptures': + '0': + 'name': 'keyword.control.at-rule.css' + '1': + 'name': 'punctuation.definition.keyword.css' + 'end': ';' + 'endCaptures': + '0': + 'name': 'punctuation.terminator.rule.css' + 'name': 'meta.at-rule.header.css' + } + ] + } + { + # @custom-at-rule + 'begin': '(?i)(?=@[\\w-]+(\\s|\\(|{|/\\*|$))' + 'end': '(?<=})(?!\\G)' + 'patterns': [ + { + 'begin': '(?i)\\G(@)[\\w-]+' + 'beginCaptures': + '0': + 'name': 'keyword.control.at-rule.css' + '1': + 'name': 'punctuation.definition.keyword.css' + 'end': '(?=\\s*[{;])' + 'name': 'meta.at-rule.header.css' + } + { + 'begin': '{' + 'beginCaptures': + '0': + 'name': 'punctuation.section.begin.bracket.curly.css' + 'end': '}' + 'endCaptures': + '0': + 'name': 'punctuation.section.end.bracket.curly.css' + 'name': 'meta.at-rule.body.css' + 'patterns': [ + { + 'include': '$self' + } + ] + } + ] + } + ] + 'color-keywords': + 'patterns': [ + { + # CSS 2.1 colours: http://www.w3.org/TR/CSS21/syndata.html#value-def-color + 'match': '(?i)(?<![\\w-])(aqua|black|blue|fuchsia|gray|green|lime|maroon|navy|olive|orange|purple|red|silver|teal|white|yellow)(?![\\w-])' + 'name': 'support.constant.color.w3c-standard-color-name.css' + } + { + # SVG/CSS3 colour names: http://www.w3.org/TR/css3-color/#svg-color + 'match': '''(?xi) (?<![\\w-]) + (aliceblue|antiquewhite|aquamarine|azure|beige|bisque|blanchedalmond|blueviolet|brown|burlywood + |cadetblue|chartreuse|chocolate|coral|cornflowerblue|cornsilk|crimson|cyan|darkblue|darkcyan + |darkgoldenrod|darkgray|darkgreen|darkgrey|darkkhaki|darkmagenta|darkolivegreen|darkorange + |darkorchid|darkred|darksalmon|darkseagreen|darkslateblue|darkslategray|darkslategrey|darkturquoise + |darkviolet|deeppink|deepskyblue|dimgray|dimgrey|dodgerblue|firebrick|floralwhite|forestgreen + |gainsboro|ghostwhite|gold|goldenrod|greenyellow|grey|honeydew|hotpink|indianred|indigo|ivory|khaki + |lavender|lavenderblush|lawngreen|lemonchiffon|lightblue|lightcoral|lightcyan|lightgoldenrodyellow + |lightgray|lightgreen|lightgrey|lightpink|lightsalmon|lightseagreen|lightskyblue|lightslategray + |lightslategrey|lightsteelblue|lightyellow|limegreen|linen|magenta|mediumaquamarine|mediumblue + |mediumorchid|mediumpurple|mediumseagreen|mediumslateblue|mediumspringgreen|mediumturquoise + |mediumvioletred|midnightblue|mintcream|mistyrose|moccasin|navajowhite|oldlace|olivedrab|orangered + |orchid|palegoldenrod|palegreen|paleturquoise|palevioletred|papayawhip|peachpuff|peru|pink|plum + |powderblue|rebeccapurple|rosybrown|royalblue|saddlebrown|salmon|sandybrown|seagreen|seashell + |sienna|skyblue|slateblue|slategray|slategrey|snow|springgreen|steelblue|tan|thistle|tomato + |transparent|turquoise|violet|wheat|whitesmoke|yellowgreen) + (?![\\w-]) + ''' + 'name': 'support.constant.color.w3c-extended-color-name.css' + } + { + # Current text colour + 'match': '(?i)(?<![\\w-])currentColor(?![\\w-])' + 'name': 'support.constant.color.current.css' + } + { + # These colours are deprecated in CSS3: http://www.w3.org/TR/css3-color/#css2-system + 'match': '''(?xi) (?<![\\w-]) + (ActiveBorder|ActiveCaption|AppWorkspace|Background|ButtonFace|ButtonHighlight|ButtonShadow + 
|ButtonText|CaptionText|GrayText|Highlight|HighlightText|InactiveBorder|InactiveCaption + |InactiveCaptionText|InfoBackground|InfoText|Menu|MenuText|Scrollbar|ThreeDDarkShadow + |ThreeDFace|ThreeDHighlight|ThreeDLightShadow|ThreeDShadow|Window|WindowFrame|WindowText) + (?![\\w-]) + ''' + 'name': 'invalid.deprecated.color.system.css' + } + ] + 'combinators': + 'patterns': [ + { + 'match': '/deep/|>>>' + 'name': 'invalid.deprecated.combinator.css' + } + { + 'match': '>>|>|\\+|~' + 'name': 'keyword.operator.combinator.css' + } + ] + 'commas': + 'match': ',' + 'name': 'punctuation.separator.list.comma.css' + 'comment-block': + 'begin': '/\\*' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.comment.begin.css' + 'end': '\\*/' + 'endCaptures': + '0': + 'name': 'punctuation.definition.comment.end.css' + 'name': 'comment.block.css' + 'escapes': + 'patterns': [ + { + 'match': '\\\\[0-9a-fA-F]{1,6}' + 'name': 'constant.character.escape.codepoint.css' + } + { + 'begin': '\\\\$\\s*' + 'end': '^(?<!\\G)' + 'name': 'constant.character.escape.newline.css' + } + { + 'match': '\\\\.' + 'name': 'constant.character.escape.css' + } + ] + 'feature-query': + 'begin': '\\(' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.condition.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.definition.condition.end.bracket.round.css' + 'name': 'meta.feature-query.css' + 'patterns': [ + { + 'include': '#feature-query-operators' + } + { + 'include': '#feature-query' + } + ] + 'feature-query-operators': + 'patterns': [ + { + 'match': '(?i)(?<=[\\s()]|^|\\*/)(and|not|or)(?=[\\s()]|/\\*|$)' + 'name': 'keyword.operator.logical.feature.$1.css' + } + { + 'include': '#rule-list-innards' + } + ] + 'font-features': + 'begin': '''(?xi) + ((@)(annotation|character-variant|ornaments|styleset|stylistic|swash)) + (?=[\\s@'"{;]|/\\*|$) + ''' + 'beginCaptures': + '1': + 'name': 'keyword.control.at-rule.${3:/downcase}.css' + '2': + 'name': 'punctuation.definition.keyword.css' + 'end': '(?<=})' + 'name': 'meta.at-rule.${3:/downcase}.css' + 'patterns': [ + { + 'begin': '{' + 'beginCaptures': + '0': + 'name': 'punctuation.section.property-list.begin.bracket.curly.css' + 'end': '}' + 'endCaptures': + '0': + 'name': 'punctuation.section.property-list.end.bracket.curly.css' + 'name': 'meta.property-list.font-feature.css' + 'patterns': [ + { + # Font-feature name + 'captures': + '0': + 'patterns': [ + { + 'include': '#escapes' + } + ] + 'match': '''(?x) + (?: [-a-zA-Z_] | [^\\x00-\\x7F] ) # First letter + (?: [-a-zA-Z0-9_] | [^\\x00-\\x7F] # Remainder of identifier + | \\\\(?:[0-9a-fA-F]{1,6}|.) 
+ )* + ''' + 'name': 'variable.font-feature.css' + } + { + 'include': '#rule-list-innards' + } + ] + } + ] + 'functions': + 'patterns': [ + # Calculation + { + 'begin': '(?i)(?<![\\w-])(calc)(\\()' + 'beginCaptures': + '1': + 'name': 'support.function.calc.css' + '2': + 'name': 'punctuation.section.function.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.function.end.bracket.round.css' + 'name': 'meta.function.calc.css' + 'patterns': [ + { + 'match': '[*/]|(?<=\\s|^)[-+](?=\\s|$)' + 'name': 'keyword.operator.arithmetic.css' + } + { + 'include': '#property-values' + } + ] + } + # Colours + { + 'begin': '(?i)(?<![\\w-])(rgba?|hsla?|hwb|lab|lch)(\\()' + 'beginCaptures': + '1': + 'name': 'support.function.misc.css' + '2': + 'name': 'punctuation.section.function.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.function.end.bracket.round.css' + 'name': 'meta.function.color.css' + 'patterns': [ + { + 'include': '#property-values' + } + ] + } + # Gradients + { + 'begin': '''(?xi) (?<![\\w-]) + ( + (?:-webkit-|-moz-|-o-)? # Accept prefixed/historical variants + (?:repeating-)? # "Repeating"-type gradient + (?:linear|radial|conic) # Shape + -gradient + ) + (\\() + ''' + 'beginCaptures': + '1': + 'name': 'support.function.gradient.css' + '2': + 'name': 'punctuation.section.function.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.function.end.bracket.round.css' + 'name': 'meta.function.gradient.css' + 'patterns': [ + { + 'match': '(?i)(?<![\\w-])(from|to|at)(?![\\w-])' + 'name': 'keyword.operator.gradient.css' + } + { + 'include': '#property-values' + } + ] + } + # Gradients (Deprecated/original Webkit syntax) + { + 'begin': '(?i)(?<![\\w-])(-webkit-gradient)(\\()' + 'beginCaptures': + '1': + 'name': 'invalid.deprecated.gradient.function.css' + '2': + 'name': 'punctuation.section.function.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.function.end.bracket.round.css' + 'name': 'meta.function.gradient.invalid.deprecated.gradient.css' + 'patterns': [ + { + 'begin': '(?i)(?<![\\w-])(from|to|color-stop)(\\()' + 'beginCaptures': + '1': + 'name': 'invalid.deprecated.function.css' + '2': + 'name': 'punctuation.section.function.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.function.end.bracket.round.css' + 'patterns': [ + { + 'include': '#property-values' + } + ] + } + { + 'include': '#property-values' + } + ] + } + # Misc functions + { + 'begin': '''(?xi) (?<![\\w-]) + (annotation|attr|blur|brightness|character-variant|clamp|contrast|counters? 
+ |cross-fade|drop-shadow|element|fit-content|format|grayscale|hue-rotate + |image-set|invert|local|max|min|minmax|opacity|ornaments|repeat|saturate|sepia + |styleset|stylistic|swash|symbols) + (\\() + ''' + 'beginCaptures': + '1': + 'name': 'support.function.misc.css' + '2': + 'name': 'punctuation.section.function.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.function.end.bracket.round.css' + 'name': 'meta.function.misc.css' + 'patterns': [ + { + 'match': '(?i)(?<=[,\\s"]|\\*/|^)\\d+x(?=[\\s,"\')]|/\\*|$)' + 'name': 'constant.numeric.other.density.css' + } + { + 'include': '#property-values' + } + { + 'match': '[^\'"),\\s]+' + 'name': 'variable.parameter.misc.css' + } + ] + } + # Shapes + { + 'begin': '(?i)(?<![\\w-])(circle|ellipse|inset|polygon|rect)(\\()' + 'beginCaptures': + '1': + 'name': 'support.function.shape.css' + '2': + 'name': 'punctuation.section.function.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.function.end.bracket.round.css' + 'name': 'meta.function.shape.css' + 'patterns': [ + { + 'match': '(?i)(?<=\\s|^|\\*/)(at|round)(?=\\s|/\\*|$)' + 'name': 'keyword.operator.shape.css' + } + { + 'include': '#property-values' + } + ] + } + # Timing-functions + { + 'begin': '(?i)(?<![\\w-])(cubic-bezier|steps)(\\()' + 'beginCaptures': + '1': + 'name': 'support.function.timing-function.css' + '2': + 'name': 'punctuation.section.function.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.function.end.bracket.round.css' + 'name': 'meta.function.timing-function.css' + 'patterns': [ + { + 'match': '(?i)(?<![\\w-])(start|end)(?=\\s*\\)|$)' + 'name': 'support.constant.step-direction.css' + } + { + 'include': '#property-values' + } + ] + } + # Transform functions + { + 'begin': '''(?xi) (?<![\\w-]) + ( (?:translate|scale|rotate)(?:[XYZ]|3D)? + | matrix(?:3D)? + | skew[XY]? + | perspective + ) + (\\() + ''' + 'beginCaptures': + '1': + 'name': 'support.function.transform.css' + '2': + 'name': 'punctuation.section.function.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.function.end.bracket.round.css' + 'patterns': [ + { + 'include': '#property-values' + } + ] + } + { + 'include': '#url' + } + # Variable expansion + { + 'begin': '(?i)(?<![\\w-])(var)(\\()' + 'beginCaptures': + '1': + 'name': 'support.function.misc.css' + '2': + 'name': 'punctuation.section.function.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.function.end.bracket.round.css' + 'name': 'meta.function.variable.css' + 'patterns': [ + { + 'name': 'variable.argument.css' + 'match': '''(?x) + -- + (?:[-a-zA-Z_] | [^\\x00-\\x7F]) # First letter + (?:[-a-zA-Z0-9_] | [^\\x00-\\x7F] # Remainder of identifier + |\\\\(?:[0-9a-fA-F]{1,6}|.) 
+ )* + ''' + } + { + 'include': '#property-values' + } + ] + } + ] + 'functional-pseudo-classes': + 'patterns': [ + { + # Text direction + 'begin': '(?i)((:)dir)(\\()' + 'beginCaptures': + '1': + 'name': 'entity.other.attribute-name.pseudo-class.css' + '2': + 'name': 'punctuation.definition.entity.css' + '3': + 'name': 'punctuation.section.function.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.function.end.bracket.round.css' + 'patterns': [ + { + 'include': '#comment-block' + } + { + 'include': '#escapes' + } + { + 'match': '(?i)(?<![\\w-])(ltr|rtl)(?![\\w-])' + 'name': 'support.constant.text-direction.css' + } + { + 'include': '#property-values' + } + ] + } + { + # Language + 'begin': '(?i)((:)lang)(\\()' + 'beginCaptures': + '1': + 'name': 'entity.other.attribute-name.pseudo-class.css' + '2': + 'name': 'punctuation.definition.entity.css' + '3': + 'name': 'punctuation.section.function.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.function.end.bracket.round.css' + 'patterns': [ + { + 'match': '(?<=[(,\\s])[a-zA-Z]+(-[a-zA-Z0-9]*|\\\\(?:[0-9a-fA-F]{1,6}|.))*(?=[),\\s])' + 'name': 'support.constant.language-range.css' + } + { + 'begin': '"' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.css' + 'end': '"' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.css' + 'name': 'string.quoted.double.css' + 'patterns': [ + { + 'include': '#escapes' + } + { + 'match': '(?<=["\\s])[a-zA-Z*]+(-[a-zA-Z0-9*]*)*(?=["\\s])' + 'name': 'support.constant.language-range.css' + } + ] + } + { + 'begin': "'" + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.css' + 'end': "'" + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.css' + 'name': 'string.quoted.single.css' + 'patterns': [ + { + 'include': '#escapes' + } + { + 'match': "(?<=['\\s])[a-zA-Z*]+(-[a-zA-Z0-9*]*)*(?=['\\s])" + 'name': 'support.constant.language-range.css' + } + ] + } + { + 'include': '#commas' + } + ] + } + { + # Logical / Selector-based + 'begin': '(?i)((:)(?:not|has|matches))(\\()' + 'beginCaptures': + '1': + 'name': 'entity.other.attribute-name.pseudo-class.css' + '2': + 'name': 'punctuation.definition.entity.css' + '3': + 'name': 'punctuation.section.function.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.function.end.bracket.round.css' + 'patterns': [ + { + 'include': '#selector-innards' + } + ] + } + { + # Child-indexed + 'begin': '(?i)((:)nth-(?:last-)?(?:child|of-type))(\\()' + 'beginCaptures': + '1': + 'name': 'entity.other.attribute-name.pseudo-class.css' + '2': + 'name': 'punctuation.definition.entity.css' + '3': + 'name': 'punctuation.section.function.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.function.end.bracket.round.css' + 'patterns': [ + { + 'match': '(?i)[+-]?(\\d+n?|n)(\\s*[+-]\\s*\\d+)?' + 'name': 'constant.numeric.css' + } + { + 'match': '(?i)even|odd' + 'name': 'support.constant.parity.css' + } + ] + } + ] + 'media-features': + 'captures': + '1': + 'name': 'support.type.property-name.media.css' + '2': + 'name': 'support.type.property-name.media.css' + '3': + 'name': 'support.type.vendored.property-name.media.css' + 'match': '''(?xi) + (?<=^|\\s|\\(|\\*/) # Preceded by whitespace, bracket or comment + (?: + # Standardised features + ( + (?:min-|max-)? 
# Range features + (?: height + | width + | aspect-ratio + | color + | color-index + | monochrome + | resolution + ) + | grid # Discrete features + | scan + | orientation + | display-mode + | hover + ) + | + # Deprecated features + ( + (?:min-|max-)? # Deprecated in Media Queries 4 + device- + (?: height + | width + | aspect-ratio + ) + ) + | + # Vendor extensions + ( + (?: + # Spec-compliant syntax + [-_] + (?: webkit # Webkit/Blink + | apple|khtml # Webkit aliases + | epub # ePub3 + | moz # Gecko + | ms # Microsoft + | o # Presto (pre-Opera 15) + | xv|ah|rim|atsc| # Less common vendors + hp|tc|wap|ro + ) + | + # Non-standard prefixes + (?: mso # Microsoft Office + | prince # YesLogic + ) + ) + - + [\\w-]+ # Feature name + (?= # Terminates correctly + \\s* # Possible whitespace + (?: # Possible injected comment + /\\* + (?:[^*]|\\*[^/])* + \\*/ + )? + \\s* + [:)] # Ends with a colon or closed bracket + ) + ) + ) + (?=\\s|$|[><:=]|\\)|/\\*) # Terminates cleanly + ''' + 'media-feature-keywords': + 'match': '''(?xi) + (?<=^|\\s|:|\\*/) + (?: portrait # Orientation + | landscape + | progressive # Scan types + | interlace + | fullscreen # Display modes + | standalone + | minimal-ui + | browser + | hover + ) + (?=\\s|\\)|$) + ''' + 'name': 'support.constant.property-value.css' + 'media-query': + 'begin': '\\G' + 'end': '(?=\\s*[{;])' + 'patterns': [ + { + 'include': '#comment-block' + } + { + 'include': '#escapes' + } + { + 'include': '#media-types' + } + { + 'match': '(?i)(?<=\\s|^|,|\\*/)(only|not)(?=\\s|{|/\\*|$)' + 'name': 'keyword.operator.logical.$1.media.css' + } + { + 'match': '(?i)(?<=\\s|^|\\*/|\\))and(?=\\s|/\\*|$)' + 'name': 'keyword.operator.logical.and.media.css' + } + { + 'match': ',(?:(?:\\s*,)+|(?=\\s*[;){]))' + 'name': 'invalid.illegal.comma.css' + } + { + 'include': '#commas' + } + { + 'begin': '\\(' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.parameters.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.definition.parameters.end.bracket.round.css' + 'patterns': [ + { + 'include': '#media-features' + } + { + 'include': '#media-feature-keywords' + } + { + 'match': ':' + 'name': 'punctuation.separator.key-value.css' + } + { + 'match': '>=|<=|=|<|>' + 'name': 'keyword.operator.comparison.css' + } + { + 'captures': + '1': + 'name': 'constant.numeric.css' + '2': + 'name': 'keyword.operator.arithmetic.css' + '3': + 'name': 'constant.numeric.css' + 'match': '(\\d+)\\s*(/)\\s*(\\d+)' + 'name': 'meta.ratio.css' + } + { + 'include': '#numeric-values' + } + { + 'include': '#comment-block' + } + ] + } + ] + 'media-query-list': + 'begin': '(?=\\s*[^{;])' + 'end': '(?=\\s*[{;])' + 'patterns': [ + { + 'include': '#media-query' + } + ] + 'media-types': + 'captures': + '1': + 'name': 'support.constant.media.css' + '2': + 'name': 'invalid.deprecated.constant.media.css' + 'match': '''(?xi) + (?<=^|\\s|,|\\*/) + (?: + # Valid media types + (all|print|screen|speech) + | + # Deprecated in Media Queries 4: http://dev.w3.org/csswg/mediaqueries/#media-types + (aural|braille|embossed|handheld|projection|tty|tv) + ) + (?=$|[{,\\s;]|/\\*) + ''' + 'numeric-values': + 'patterns': [ + { + 'captures': + '1': + 'name': 'punctuation.definition.constant.css' + 'match': '(#)(?:[0-9a-fA-F]{3,4}|[0-9a-fA-F]{6}|[0-9a-fA-F]{8})\\b' + 'name': 'constant.other.color.rgb-value.hex.css' + } + { + 'captures': + '1': + 'name': 'keyword.other.unit.percentage.css' + '2': + 'name': 'keyword.other.unit.${2:/downcase}.css' + 'match': '''(?xi) (?<![\\w-]) + [-+]? 
# Sign indicator + + (?: # Numerals + [0-9]+ (?:\\.[0-9]+)? # Integer/float with leading digits + | \\.[0-9]+ # Float without leading digits + ) + + (?: # Scientific notation + (?<=[0-9]) # Exponent must follow a digit + E # Exponent indicator + [-+]? # Possible sign indicator + [0-9]+ # Exponent value + )? + + (?: # Possible unit for data-type: + (%) # - Percentage + | ( deg|grad|rad|turn # - Angle + | Hz|kHz # - Frequency + | ch|cm|em|ex|fr|in|mm|mozmm| # - Length + pc|pt|px|q|rem|vh|vmax|vmin| + vw + | dpi|dpcm|dppx # - Resolution + | s|ms # - Time + ) + \\b # Boundary checking intentionally lax to + )? # facilitate embedding in CSS-like grammars + ''' + 'name': 'constant.numeric.css' + } + ] + 'property-keywords': + 'patterns': [ + { + 'match': '''(?xi) (?<![\\w-]) + (above|absolute|active|add|additive|after-edge|alias|all|all-petite-caps|all-scroll|all-small-caps|alpha|alphabetic|alternate|alternate-reverse + |always|antialiased|auto|auto-pos|available|avoid|avoid-column|avoid-page|avoid-region|backwards|balance|baseline|before-edge|below|bevel + |bidi-override|blink|block|block-axis|block-start|block-end|bold|bolder|border|border-box|both|bottom|bottom-outside|break-all|break-word|bullets + |butt|capitalize|caption|cell|center|central|char|circle|clip|clone|close-quote|closest-corner|closest-side|col-resize|collapse|color|color-burn + |color-dodge|column|column-reverse|common-ligatures|compact|condensed|contain|content|content-box|contents|context-menu|contextual|copy|cover + |crisp-edges|crispEdges|crosshair|cyclic|dark|darken|dashed|decimal|default|dense|diagonal-fractions|difference|digits|disabled|disc|discretionary-ligatures + |distribute|distribute-all-lines|distribute-letter|distribute-space|dot|dotted|double|double-circle|downleft|downright|e-resize|each-line|ease|ease-in + |ease-in-out|ease-out|economy|ellipse|ellipsis|embed|end|evenodd|ew-resize|exact|exclude|exclusion|expanded|extends|extra-condensed|extra-expanded + |fallback|farthest-corner|farthest-side|fill|fill-available|fill-box|filled|fit-content|fixed|flat|flex|flex-end|flex-start|flip|flow-root|forwards|freeze + |from-image|full-width|geometricPrecision|georgian|grab|grabbing|grayscale|grid|groove|hand|hanging|hard-light|help|hidden|hide + |historical-forms|historical-ligatures|horizontal|horizontal-tb|hue|icon|ideograph-alpha|ideograph-numeric|ideograph-parenthesis|ideograph-space + |ideographic|inactive|infinite|inherit|initial|inline|inline-axis|inline-block|inline-end|inline-flex|inline-grid|inline-list-item|inline-start + |inline-table|inset|inside|inter-character|inter-ideograph|inter-word|intersect|invert|isolate|isolate-override|italic|jis04|jis78|jis83 + |jis90|justify|justify-all|kannada|keep-all|landscape|large|larger|left|light|lighten|lighter|line|line-edge|line-through|linear|linearRGB + |lining-nums|list-item|local|loose|lowercase|lr|lr-tb|ltr|luminance|luminosity|main-size|mandatory|manipulation|manual|margin-box|match-parent + |match-source|mathematical|max-content|medium|menu|message-box|middle|min-content|miter|mixed|move|multiply|n-resize|narrower|ne-resize + |nearest-neighbor|nesw-resize|newspaper|no-change|no-clip|no-close-quote|no-common-ligatures|no-contextual|no-discretionary-ligatures + |no-drop|no-historical-ligatures|no-open-quote|no-repeat|none|nonzero|normal|not-allowed|nowrap|ns-resize|numbers|numeric|nw-resize|nwse-resize + |oblique|oldstyle-nums|open|open-quote|optimizeLegibility|optimizeQuality|optimizeSpeed|optional|ordinal|outset|outside|over|overlay|overline|padding + 
|padding-box|page|painted|pan-down|pan-left|pan-right|pan-up|pan-x|pan-y|paused|petite-caps|pixelated|plaintext|pointer|portrait|pre|pre-line + |pre-wrap|preserve-3d|progress|progressive|proportional-nums|proportional-width|proximity|radial|recto|region|relative|remove|repeat|repeat-[xy] + |reset-size|reverse|revert|ridge|right|rl|rl-tb|round|row|row-resize|row-reverse|row-severse|rtl|ruby|ruby-base|ruby-base-container|ruby-text + |ruby-text-container|run-in|running|s-resize|saturation|scale-down|screen|scroll|scroll-position|se-resize|semi-condensed|semi-expanded|separate + |sesame|show|sideways|sideways-left|sideways-lr|sideways-right|sideways-rl|simplified|slashed-zero|slice|small|small-caps|small-caption|smaller + |smooth|soft-light|solid|space|space-around|space-between|space-evenly|spell-out|square|sRGB|stacked-fractions|start|static|status-bar|swap + |step-end|step-start|sticky|stretch|strict|stroke|stroke-box|style|sub|subgrid|subpixel-antialiased|subtract|super|sw-resize|symbolic|table + |table-caption|table-cell|table-column|table-column-group|table-footer-group|table-header-group|table-row|table-row-group|tabular-nums|tb|tb-rl + |text|text-after-edge|text-before-edge|text-bottom|text-top|thick|thin|titling-caps|top|top-outside|touch|traditional|transparent|triangle + |ultra-condensed|ultra-expanded|under|underline|unicase|unset|upleft|uppercase|upright|use-glyph-orientation|use-script|verso|vertical + |vertical-ideographic|vertical-lr|vertical-rl|vertical-text|view-box|visible|visibleFill|visiblePainted|visibleStroke|w-resize|wait|wavy + |weight|whitespace|wider|words|wrap|wrap-reverse|x|x-large|x-small|xx-large|xx-small|y|zero|zoom-in|zoom-out) + (?![\\w-]) + ''' + 'name': 'support.constant.property-value.css' + } + { + 'match': '''(?xi) (?<![\\w-]) + (arabic-indic|armenian|bengali|cambodian|circle|cjk-decimal|cjk-earthly-branch|cjk-heavenly-stem|cjk-ideographic + |decimal|decimal-leading-zero|devanagari|disc|disclosure-closed|disclosure-open|ethiopic-halehame-am + |ethiopic-halehame-ti-e[rt]|ethiopic-numeric|georgian|gujarati|gurmukhi|hangul|hangul-consonant|hebrew + |hiragana|hiragana-iroha|japanese-formal|japanese-informal|kannada|katakana|katakana-iroha|khmer + |korean-hangul-formal|korean-hanja-formal|korean-hanja-informal|lao|lower-alpha|lower-armenian|lower-greek + |lower-latin|lower-roman|malayalam|mongolian|myanmar|oriya|persian|simp-chinese-formal|simp-chinese-informal + |square|tamil|telugu|thai|tibetan|trad-chinese-formal|trad-chinese-informal|upper-alpha|upper-armenian + |upper-latin|upper-roman|urdu) + (?![\\w-]) + ''' + 'name': 'support.constant.property-value.list-style-type.css' + } + { + 'match': '(?<![\\w-])(?i:-(?:ah|apple|atsc|epub|hp|khtml|moz|ms|o|rim|ro|tc|wap|webkit|xv)|(?:mso|prince))-[a-zA-Z-]+' + 'name': 'support.constant.vendored.property-value.css' + } + { + 'match': '(?<![\\w-])(?i:arial|century|comic|courier|garamond|georgia|helvetica|impact|lucida|symbol|system-ui|system|tahoma|times|trebuchet|ui-monospace|ui-rounded|ui-sans-serif|ui-serif|utopia|verdana|webdings|sans-serif|serif|monospace)(?![\\w-])' + 'name': 'support.constant.font-name.css' + } + ] + 'property-names': + 'patterns': [ + { + 'match': '''(?xi) (?<![\\w-]) + (?: + # Standard CSS + accent-color|additive-symbols|align-content|align-items|align-self|all|animation|animation-delay|animation-direction|animation-duration + | animation-fill-mode|animation-iteration-count|animation-name|animation-play-state|animation-timing-function|backdrop-filter + | 
backface-visibility|background|background-attachment|background-blend-mode|background-clip|background-color|background-image + | background-origin|background-position|background-position-[xy]|background-repeat|background-size|bleed|block-size|border + | border-block-end|border-block-end-color|border-block-end-style|border-block-end-width|border-block-start|border-block-start-color + | border-block-start-style|border-block-start-width|border-bottom|border-bottom-color|border-bottom-left-radius|border-bottom-right-radius + | border-bottom-style|border-bottom-width|border-collapse|border-color|border-end-end-radius|border-end-start-radius|border-image + | border-image-outset|border-image-repeat|border-image-slice|border-image-source|border-image-width|border-inline-end + | border-inline-end-color|border-inline-end-style|border-inline-end-width|border-inline-start|border-inline-start-color + | border-inline-start-style|border-inline-start-width|border-left|border-left-color|border-left-style|border-left-width + | border-radius|border-right|border-right-color|border-right-style|border-right-width|border-spacing|border-start-end-radius + | border-start-start-radius|border-style|border-top|border-top-color|border-top-left-radius|border-top-right-radius|border-top-style + | border-top-width|border-width|bottom|box-decoration-break|box-shadow|box-sizing|break-after|break-before|break-inside|caption-side + | caret-color|clear|clip|clip-path|clip-rule|color|color-adjust|color-interpolation-filters|color-scheme|column-count|column-fill|column-gap + | column-rule|column-rule-color|column-rule-style|column-rule-width|column-span|column-width|columns|contain|content|counter-increment + | counter-reset|cursor|direction|display|empty-cells|enable-background|fallback|fill|fill-opacity|fill-rule|filter|flex|flex-basis + | flex-direction|flex-flow|flex-grow|flex-shrink|flex-wrap|float|flood-color|flood-opacity|font|font-display|font-family + | font-feature-settings|font-kerning|font-language-override|font-optical-sizing|font-size|font-size-adjust|font-stretch + | font-style|font-synthesis|font-variant|font-variant-alternates|font-variant-caps|font-variant-east-asian|font-variant-ligatures + | font-variant-numeric|font-variant-position|font-variation-settings|font-weight|gap|glyph-orientation-horizontal|glyph-orientation-vertical + | grid|grid-area|grid-auto-columns|grid-auto-flow|grid-auto-rows|grid-column|grid-column-end|grid-column-gap|grid-column-start + | grid-gap|grid-row|grid-row-end|grid-row-gap|grid-row-start|grid-template|grid-template-areas|grid-template-columns|grid-template-rows + | hanging-punctuation|height|hyphens|image-orientation|image-rendering|image-resolution|ime-mode|initial-letter|initial-letter-align + | inline-size|inset|inset-block|inset-block-end|inset-block-start|inset-inline|inset-inline-end|inset-inline-start|isolation + | justify-content|justify-items|justify-self|kerning|left|letter-spacing|lighting-color|line-break|line-clamp|line-height|list-style + | list-style-image|list-style-position|list-style-type|margin|margin-block|margin-block-end|margin-block-start|margin-bottom|margin-inline|margin-inline-end|margin-inline-start + | margin-left|margin-right|margin-top|marker-end|marker-mid|marker-start|marks|mask|mask-border|mask-border-mode|mask-border-outset + | mask-border-repeat|mask-border-slice|mask-border-source|mask-border-width|mask-clip|mask-composite|mask-image|mask-mode + | 
mask-origin|mask-position|mask-repeat|mask-size|mask-type|max-block-size|max-height|max-inline-size|max-lines|max-width + | max-zoom|min-block-size|min-height|min-inline-size|min-width|min-zoom|mix-blend-mode|negative|object-fit|object-position + | offset|offset-anchor|offset-distance|offset-path|offset-position|offset-rotation|opacity|order|orientation|orphans + | outline|outline-color|outline-offset|outline-style|outline-width|overflow|overflow-anchor|overflow-block|overflow-inline + | overflow-wrap|overflow-[xy]|overscroll-behavior|overscroll-behavior-block|overscroll-behavior-inline|overscroll-behavior-[xy] + | pad|padding|padding-block|padding-block-end|padding-block-start|padding-bottom|padding-inline|padding-inline-end|padding-inline-start|padding-left + | padding-right|padding-top|page-break-after|page-break-before|page-break-inside|paint-order|perspective|perspective-origin + | place-content|place-items|place-self|pointer-events|position|prefix|quotes|range|resize|right|rotate|row-gap|ruby-align + | ruby-merge|ruby-position|scale|scroll-behavior|scroll-margin|scroll-margin-block|scroll-margin-block-end|scroll-margin-block-start + | scroll-margin-bottom|scroll-margin-inline|scroll-margin-inline-end|scroll-margin-inline-start|scroll-margin-left|scroll-margin-right + | scroll-margin-top|scroll-padding|scroll-padding-block|scroll-padding-block-end|scroll-padding-block-start|scroll-padding-bottom + | scroll-padding-inline|scroll-padding-inline-end|scroll-padding-inline-start|scroll-padding-left|scroll-padding-right + | scroll-padding-top|scroll-snap-align|scroll-snap-coordinate|scroll-snap-destination|scroll-snap-stop|scroll-snap-type + | scrollbar-color|scrollbar-gutter|scrollbar-width|shape-image-threshold|shape-margin|shape-outside|shape-rendering|size + | speak-as|src|stop-color|stop-opacity|stroke|stroke-dasharray|stroke-dashoffset|stroke-linecap|stroke-linejoin|stroke-miterlimit + | stroke-opacity|stroke-width|suffix|symbols|system|tab-size|table-layout|text-align|text-align-last|text-anchor|text-combine-upright + | text-decoration|text-decoration-color|text-decoration-line|text-decoration-skip|text-decoration-skip-ink|text-decoration-style + | text-emphasis|text-emphasis-color|text-emphasis-position|text-emphasis-style|text-indent|text-justify|text-orientation + | text-overflow|text-rendering|text-shadow|text-size-adjust|text-transform|text-underline-offset|text-underline-position|top|touch-action|transform + | transform-box|transform-origin|transform-style|transition|transition-delay|transition-duration|transition-property|transition-timing-function + | translate|unicode-bidi|unicode-range|user-select|user-zoom|vertical-align|visibility|white-space|widows|width|will-change + | word-break|word-spacing|word-wrap|writing-mode|z-index|zoom + + # SVG attributes + | alignment-baseline|baseline-shift|clip-rule|color-interpolation|color-interpolation-filters|color-profile + | color-rendering|cx|cy|dominant-baseline|enable-background|fill|fill-opacity|fill-rule|flood-color|flood-opacity + | glyph-orientation-horizontal|glyph-orientation-vertical|height|kerning|lighting-color|marker-end|marker-mid + | marker-start|r|rx|ry|shape-rendering|stop-color|stop-opacity|stroke|stroke-dasharray|stroke-dashoffset|stroke-linecap + | stroke-linejoin|stroke-miterlimit|stroke-opacity|stroke-width|text-anchor|width|x|y + + # Not listed on MDN; presumably deprecated + | adjust|after|align|align-last|alignment|alignment-adjust|appearance|attachment|azimuth|background-break + | 
balance|baseline|before|bidi|binding|bookmark|bookmark-label|bookmark-level|bookmark-target|border-length + | bottom-color|bottom-left-radius|bottom-right-radius|bottom-style|bottom-width|box|box-align|box-direction + | box-flex|box-flex-group|box-lines|box-ordinal-group|box-orient|box-pack|break|character|collapse|column + | column-break-after|column-break-before|count|counter|crop|cue|cue-after|cue-before|decoration|decoration-break + | delay|display-model|display-role|down|drop|drop-initial-after-adjust|drop-initial-after-align|drop-initial-before-adjust + | drop-initial-before-align|drop-initial-size|drop-initial-value|duration|elevation|emphasis|family|fit|fit-position + | flex-group|float-offset|gap|grid-columns|grid-rows|hanging-punctuation|header|hyphenate|hyphenate-after|hyphenate-before + | hyphenate-character|hyphenate-lines|hyphenate-resource|icon|image|increment|indent|index|initial-after-adjust + | initial-after-align|initial-before-adjust|initial-before-align|initial-size|initial-value|inline-box-align|iteration-count + | justify|label|left-color|left-style|left-width|length|level|line|line-stacking|line-stacking-ruby|line-stacking-shift + | line-stacking-strategy|lines|list|mark|mark-after|mark-before|marks|marquee|marquee-direction|marquee-play-count|marquee-speed + | marquee-style|max|min|model|move-to|name|nav|nav-down|nav-index|nav-left|nav-right|nav-up|new|numeral|offset|ordinal-group + | orient|origin|overflow-style|overhang|pack|page|page-policy|pause|pause-after|pause-before|phonemes|pitch|pitch-range + | play-count|play-during|play-state|point|presentation|presentation-level|profile|property|punctuation|punctuation-trim + | radius|rate|rendering-intent|repeat|replace|reset|resolution|resource|respond-to|rest|rest-after|rest-before|richness + | right-color|right-style|right-width|role|rotation|rotation-point|rows|ruby|ruby-overhang|ruby-span|rule|rule-color + | rule-style|rule-width|shadow|size|size-adjust|sizing|space|space-collapse|spacing|span|speak|speak-header|speak-numeral + | speak-punctuation|speech|speech-rate|speed|stacking|stacking-ruby|stacking-shift|stacking-strategy|stress|stretch + | string-set|style|style-image|style-position|style-type|target|target-name|target-new|target-position|text|text-height + | text-justify|text-outline|text-replace|text-wrap|timing-function|top-color|top-left-radius|top-right-radius|top-style + | top-width|trim|unicode|up|user-select|variant|voice|voice-balance|voice-duration|voice-family|voice-pitch|voice-pitch-range + | voice-rate|voice-stress|voice-volume|volume|weight|white|white-space-collapse|word|wrap + ) + (?![\\w-]) + ''' + 'name': 'support.type.property-name.css' + } + { + 'match': '(?<![\\w-])(?i:-(?:ah|apple|atsc|epub|hp|khtml|moz|ms|o|rim|ro|tc|wap|webkit|xv)|(?:mso|prince))-[a-zA-Z-]+' + 'name': 'support.type.vendored.property-name.css' + } + ] + 'property-values': + 'patterns': [ + { + 'include': '#commas' + } + { + 'include': '#comment-block' + } + { + 'include': '#escapes' + } + { + 'include': '#functions' + } + { + 'include': '#property-keywords' + } + { + 'include': '#unicode-range' + } + { + 'include': '#numeric-values' + } + { + 'include': '#color-keywords' + } + { + 'include': '#string' + } + { + 'match': '!\\s*important(?![\\w-])' + 'name': 'keyword.other.important.css' + } + ] + 'pseudo-classes': + 'captures': + '1': + 'name': 'punctuation.definition.entity.css' + '2': + 'name': 'invalid.illegal.colon.css' + 'match': '''(?xi) + (:)(:*) + (?: active|any-link|checked|default|disabled|empty|enabled|first 
+ | (?:first|last|only)-(?:child|of-type)|focus|focus-visible|focus-within|fullscreen|host|hover + | in-range|indeterminate|invalid|left|link|optional|out-of-range + | read-only|read-write|required|right|root|scope|target|unresolved + | valid|visited + )(?![\\w-]|\\s*[;}]) + ''' + 'name': 'entity.other.attribute-name.pseudo-class.css' + 'pseudo-elements': + 'captures': + '1': + 'name': 'punctuation.definition.entity.css' + '2': + 'name': 'punctuation.definition.entity.css' + 'match': '''(?xi) + (?: + (::?) # Elements using both : and :: notation + (?: after + | before + | first-letter + | first-line + | (?:-(?:ah|apple|atsc|epub|hp|khtml|moz + |ms|o|rim|ro|tc|wap|webkit|xv) + | (?:mso|prince)) + -[a-z-]+ + ) + | + (::) # Double-colon only + (?: backdrop + | content + | grammar-error + | marker + | placeholder + | selection + | shadow + | spelling-error + ) + ) + (?![\\w-]|\\s*[;}]) + ''' + 'name': 'entity.other.attribute-name.pseudo-element.css' + 'rule-list': + 'begin': '{' + 'beginCaptures': + '0': + 'name': 'punctuation.section.property-list.begin.bracket.curly.css' + 'end': '}' + 'endCaptures': + '0': + 'name': 'punctuation.section.property-list.end.bracket.curly.css' + 'name': 'meta.property-list.css' + 'patterns': [ + { + 'include': '#rule-list-innards' + } + ] + 'rule-list-innards': + 'patterns': [ + { + 'include': '#comment-block' + } + { + 'include': '#escapes' + } + { + 'include': '#font-features' + } + { + # Custom properties + 'match': '''(?x) (?<![\\w-]) + -- + (?:[-a-zA-Z_] | [^\\x00-\\x7F]) # First letter + (?:[-a-zA-Z0-9_] | [^\\x00-\\x7F] # Remainder of identifier + |\\\\(?:[0-9a-fA-F]{1,6}|.) + )* + ''' + 'name': 'variable.css' + } + { + 'begin': '(?<![-a-zA-Z])(?=[-a-zA-Z])' + 'end': '$|(?![-a-zA-Z])' + 'name': 'meta.property-name.css' + 'patterns': [ + { + 'include': '#property-names' + } + ] + } + { + 'begin': '(:)\\s*' + 'beginCaptures': + '1': + 'name': 'punctuation.separator.key-value.css' + 'end': '\\s*(;)|\\s*(?=}|\\))' + 'endCaptures': + '1': + 'name': 'punctuation.terminator.rule.css' + 'contentName': 'meta.property-value.css' + 'patterns': [ + { + 'include': '#comment-block' + } + { + 'include': '#property-values' + } + ] + } + { + 'match': ';' + 'name': 'punctuation.terminator.rule.css' + } + ] + 'selector': + 'begin': '''(?x) + (?= + (?:\\|)? # Possible anonymous namespace prefix + (?: + [-\\[:.*\\#a-zA-Z_] # Valid selector character + | + [^\\x00-\\x7F] # Which can include non-ASCII symbols + | + \\\\ # Or an escape sequence + (?:[0-9a-fA-F]{1,6}|.) + ) + ) + ''' + 'end': '(?=\\s*[/@{)])' + 'name': 'meta.selector.css' + 'patterns': [ + { + 'include': '#selector-innards' + } + ] + 'selector-innards': + 'patterns': [ + { + 'include': '#comment-block' + } + { + 'include': '#commas' + } + { + 'include': '#escapes' + } + { + 'include': '#combinators' + } + { + 'captures': + '1': + 'name': 'entity.other.namespace-prefix.css' + '2': + 'name': 'punctuation.separator.css' + 'match': '''(?x) + (?:^|(?<=[\\s,(};])) # Follows whitespace, comma, semicolon, or bracket + (?! + [-\\w*]+ + \\| + (?! + [-\\[:.*\\#a-zA-Z_] # Make sure there's a selector to match + | [^\\x00-\\x7F] + ) + ) + ( + (?: [-a-zA-Z_] | [^\\x00-\\x7F] ) # First letter + (?: [-a-zA-Z0-9_] | [^\\x00-\\x7F] # Remainder of identifier + | \\\\(?:[0-9a-fA-F]{1,6}|.) + )* + | + \\* # Universal namespace + )? 
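+                  # Illustrative note (not in the upstream grammar): in "svg|circle"
+                  # or "*|p" the prefix ("svg" / "*") is captured by the group above,
+                  # and the "|" below is the separator; an anonymous namespace such
+                  # as "|p" simply leaves the first group unmatched.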
+ (\\|) # Namespace separator + ''' + } + { + 'include': '#tag-names' + } + { + 'match': '\\*' + 'name': 'entity.name.tag.wildcard.css' + } + { + 'captures': + '1': + 'name': 'punctuation.definition.entity.css' + '2': + 'patterns': [ + { + 'include': '#escapes' + } + ] + 'match': '''(?x) (?<![@\\w-]) + ([.\\#]) + # Invalid identifier + ( + (?: + # Starts with ASCII digits, with possible hyphen preceding it + -?[0-9] + | + # Consists of a hyphen only + - # Terminated by either: + (?= $ # - End-of-line + | [\\s,.\\#)\\[:{>+~|] # - Followed by another selector + | /\\* # - Followed by a block comment + ) + | + # Name contains unescaped ASCII symbol + (?: # Check for acceptable preceding characters + [-a-zA-Z_0-9]|[^\\x00-\\x7F] # - Valid selector character + | \\\\(?:[0-9a-fA-F]{1,6}|.) # - Escape sequence + )* + (?: # Invalid punctuation + [!"'%&(*;<?@^`|\\]}] # - NOTE: We exempt `)` from the list of checked + | # symbols to avoid matching `:not(.invalid)` + / (?!\\*) # - Avoid invalidating the start of a comment + )+ + ) + # Mark remainder of selector invalid + (?: [-a-zA-Z_0-9]|[^\\x00-\\x7F] # - Otherwise valid identifier characters + | \\\\(?:[0-9a-fA-F]{1,6}|.) # - Escape sequence + )* + ) + ''' + 'name': 'invalid.illegal.bad-identifier.css' + } + { + 'captures': + '1': + 'name': 'punctuation.definition.entity.css' + '2': + 'patterns': [ + { + 'include': '#escapes' + } + ] + 'match': '''(?x) + (\\.) # Valid class-name + ( + (?: [-a-zA-Z_0-9]|[^\\x00-\\x7F] # Valid identifier characters + | \\\\(?:[0-9a-fA-F]{1,6}|.) # Escape sequence + )+ + ) # Followed by either: + (?= $ # - End of the line + | [\\s,.\\#)\\[:{>+~|] # - Another selector + | /\\* # - A block comment + ) + ''' + 'name': 'entity.other.attribute-name.class.css' + } + { + 'captures': + '1': + 'name': 'punctuation.definition.entity.css' + '2': + 'patterns': [ + { + 'include': '#escapes' + } + ] + 'match': '''(?x) + (\\#) + ( + -? 
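+                  # Illustrative note (not in the upstream grammar): a leading hyphen
+                  # is allowed but a leading digit is not, so "#-nav" matches here
+                  # while "#1col" is caught by the bad-identifier rule above.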
+ (?![0-9]) + (?:[-a-zA-Z0-9_]|[^\\x00-\\x7F]|\\\\(?:[0-9a-fA-F]{1,6}|.))+ + ) + (?=$|[\\s,.\\#)\\[:{>+~|]|/\\*) + ''' + 'name': 'entity.other.attribute-name.id.css' + } + { + 'begin': '\\[' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.entity.begin.bracket.square.css' + 'end': '\\]' + 'endCaptures': + '0': + 'name': 'punctuation.definition.entity.end.bracket.square.css' + 'name': 'meta.attribute-selector.css' + 'patterns': [ + { + 'include': '#comment-block' + } + { + 'include': '#string' + } + { + 'captures': + '1': + 'name': 'storage.modifier.ignore-case.css' + 'match': '(?<=["\'\\s]|^|\\*/)\\s*([iI])\\s*(?=[\\s\\]]|/\\*|$)' + } + { + 'captures': + '1': + 'name': 'string.unquoted.attribute-value.css' + 'patterns': [ + { + 'include': '#escapes' + } + ] + 'match': '(?x)(?<==)\\s*((?!/\\*)(?:[^\\\\"\'\\s\\]]|\\\\.)+)' + } + { + 'include': '#escapes' + } + { + 'match': '[~|^$*]?=' + 'name': 'keyword.operator.pattern.css' + } + { + 'match': '\\|' + 'name': 'punctuation.separator.css' + } + { + 'captures': + '1': + 'name': 'entity.other.namespace-prefix.css' + 'patterns': [ + { + 'include': '#escapes' + } + ] + 'match': '''(?x) + # Qualified namespace prefix + ( -?(?!\\d)(?:[\\w-]|[^\\x00-\\x7F]|\\\\(?:[0-9a-fA-F]{1,6}|.))+ + | \\* + ) + # Lookahead to ensure there's a valid identifier ahead + (?= + \\| (?!\\s|=|$|\\]) + (?: -?(?!\\d) + | [\\\\\\w-] + | [^\\x00-\\x7F] + ) + ) + ''' + } + { + 'captures': + '1': + 'name': 'entity.other.attribute-name.css' + 'patterns': [ + { + 'include': '#escapes' + } + ] + 'match': '''(?x) + (-?(?!\\d)(?>[\\w-]|[^\\x00-\\x7F]|\\\\(?:[0-9a-fA-F]{1,6}|.))+) + \\s* + (?=[~|^\\]$*=]|/\\*) + ''' + } + ] + } + { + 'include': '#pseudo-classes' + } + { + 'include': '#pseudo-elements' + } + { + 'include': '#functional-pseudo-classes' + } + # Custom HTML elements + { + 'match': '''(?x) (?<![@\\w-]) + (?= # Custom element names must: + [a-z] # - start with a lowercase ASCII letter, + \\w* - # - contain at least one dash + ) + (?: + (?![A-Z]) # No uppercase ASCII letters are allowed + [\\w-] # Allow any other word character or dash + )+ + (?![(\\w-]) + ''' + 'name': 'entity.name.tag.custom.css' + } + ] + 'string': + 'patterns': [ + { + 'begin': '"' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.css' + 'end': '"|(?<!\\\\)(?=$|\\n)' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.css' + 'name': 'string.quoted.double.css' + 'patterns': [ + { + 'begin': '(?:\\G|^)(?=(?:[^\\\\"]|\\\\.)+$)' + 'end': '$' + 'name': 'invalid.illegal.unclosed.string.css' + 'patterns': [ + { + 'include': '#escapes' + } + ] + } + { + 'include': '#escapes' + } + ] + } + { + 'begin': '\'' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.css' + 'end': "'|(?<!\\\\)(?=$|\\n)" + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.css' + 'name': 'string.quoted.single.css' + 'patterns': [ + { + 'begin': "(?:\\G|^)(?=(?:[^\\\\']|\\\\.)+$)" + 'end': '$' + 'name': 'invalid.illegal.unclosed.string.css' + 'patterns': [ + { + 'include': '#escapes' + } + ] + } + { + 'include': '#escapes' + } + ] + } + ] + 'tag-names': + 'match': '''(?xi) (?<![\\w:-]) + (?: + # HTML + a|abbr|acronym|address|applet|area|article|aside|audio|b|base|basefont|bdi|bdo|bgsound + | big|blink|blockquote|body|br|button|canvas|caption|center|cite|code|col|colgroup|command + | content|data|datalist|dd|del|details|dfn|dialog|dir|div|dl|dt|element|em|embed|fieldset + | 
figcaption|figure|font|footer|form|frame|frameset|h[1-6]|head|header|hgroup|hr|html|i + | iframe|image|img|input|ins|isindex|kbd|keygen|label|legend|li|link|listing|main|map|mark + | marquee|math|menu|menuitem|meta|meter|multicol|nav|nextid|nobr|noembed|noframes|noscript + | object|ol|optgroup|option|output|p|param|picture|plaintext|pre|progress|q|rb|rp|rt|rtc + | ruby|s|samp|script|section|select|shadow|slot|small|source|spacer|span|strike|strong + | style|sub|summary|sup|table|tbody|td|template|textarea|tfoot|th|thead|time|title|tr + | track|tt|u|ul|var|video|wbr|xmp + + # SVG + | altGlyph|altGlyphDef|altGlyphItem|animate|animateColor|animateMotion|animateTransform + | circle|clipPath|color-profile|cursor|defs|desc|discard|ellipse|feBlend|feColorMatrix + | feComponentTransfer|feComposite|feConvolveMatrix|feDiffuseLighting|feDisplacementMap + | feDistantLight|feDropShadow|feFlood|feFuncA|feFuncB|feFuncG|feFuncR|feGaussianBlur + | feImage|feMerge|feMergeNode|feMorphology|feOffset|fePointLight|feSpecularLighting + | feSpotLight|feTile|feTurbulence|filter|font-face|font-face-format|font-face-name + | font-face-src|font-face-uri|foreignObject|g|glyph|glyphRef|hatch|hatchpath|hkern + | line|linearGradient|marker|mask|mesh|meshgradient|meshpatch|meshrow|metadata + | missing-glyph|mpath|path|pattern|polygon|polyline|radialGradient|rect|set|solidcolor + | stop|svg|switch|symbol|text|textPath|tref|tspan|use|view|vkern + + # MathML + | annotation|annotation-xml|maction|maligngroup|malignmark|math|menclose|merror|mfenced + | mfrac|mglyph|mi|mlabeledtr|mlongdiv|mmultiscripts|mn|mo|mover|mpadded|mphantom|mroot + | mrow|ms|mscarries|mscarry|msgroup|msline|mspace|msqrt|msrow|mstack|mstyle|msub|msubsup + | msup|mtable|mtd|mtext|mtr|munder|munderover|semantics + ) + (?=[+~>\\s,.\\#|){:\\[]|/\\*|$) + ''' + 'name': 'entity.name.tag.css' + 'unicode-range': + 'captures': + '0': + 'name': 'constant.other.unicode-range.css' + '1': + 'name': 'punctuation.separator.dash.unicode-range.css' + 'match': '(?<![\\w-])[Uu]\\+[0-9A-Fa-f?]{1,6}(?:(-)[0-9A-Fa-f]{1,6})?(?![\\w-])' + 'url': + 'begin': '(?i)(?<![\\w@-])(url)(\\()' + 'beginCaptures': + '1': + 'name': 'support.function.url.css' + '2': + 'name': 'punctuation.section.function.begin.bracket.round.css' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.section.function.end.bracket.round.css' + 'name': 'meta.function.url.css' + 'patterns': [ + { + 'match': '[^\'")\\s]+' + 'name': 'variable.parameter.url.css' + } + { + 'include': '#string' + } + { + 'include': '#comment-block' + } + { + 'include': '#escapes' + } + ] diff --git a/packages/language-css/grammars/tree-sitter-css.cson b/packages/language-css/grammars/tree-sitter-css.cson new file mode 100644 index 000000000..4ecb631c2 --- /dev/null +++ b/packages/language-css/grammars/tree-sitter-css.cson @@ -0,0 +1,107 @@ +name: 'CSS' +scopeName: 'source.css' +type: 'tree-sitter' +parser: 'tree-sitter-css' + +fileTypes: [ + 'css' +] + +injectionRegExp: '(css|CSS)' + +folds: [ + { + start: {index: 0, type: '{'}, + end: {index: -1, type: '}'} + } + { + type: 'comment' + } +] + +comments: + start: '/*' + end: '*/' + +scopes: + 'stylesheet': 'source.css' + 'comment': 'comment' + + 'tag_name': 'entity.name.tag' + 'nesting_selector, universal_selector': 'entity.name.tag' + 'sibling_selector > "~"': 'keyword.operator.combinator' + 'child_selector > ">"': 'keyword.operator.combinator' + 'adjacent_sibling_selector > "+"': 'keyword.operator.combinator' + 'attribute_selector > "="': 'keyword.operator.pattern' + 
'attribute_selector > "^="': 'keyword.operator.pattern' + 'attribute_selector > "|="': 'keyword.operator.pattern' + 'attribute_selector > "~="': 'keyword.operator.pattern' + 'attribute_selector > "$="': 'keyword.operator.pattern' + 'attribute_selector > "*="': 'keyword.operator.pattern' + 'attribute_selector > plain_value': 'string.unquoted.attribute-value' + 'pseudo_element_selector > tag_name': 'entity.other.attribute-name.pseudo-element' + 'pseudo_class_selector > class_name': 'entity.other.attribute-name.pseudo-class' + 'class_name': 'entity.other.attribute-name.class' + 'id_name': 'entity.other.attribute-name.id' + 'namespace_name': 'entity.namespace.name' + 'function_name': 'support.function' + + 'property_name, plain_value': [ + {match: '^--', scopes: 'variable.css'} + ] + + 'property_name': 'support.property-name' + 'attribute_name': 'entity.other.attribute-name' + + ' + "@media", + "@import", + "@charset", + "@namespace", + "@supports", + "@keyframes", + at_keyword + ': 'keyword.control.at-rule' + + 'to, from': 'keyword.control' + + 'important': 'keyword.other.important.css' + + 'string_value': 'string' + 'color_value': 'constant.other.color' + 'integer_value': 'numeric.constant' + 'integer_value > unit': 'keyword.other.unit' + 'float_value': 'numeric.constant' + 'float_value > unit': 'keyword.other.unit' + 'plain_value': [ + {match:'^(aqua|black|blue|fuchsia|gray|green|lime|maroon|navy|olive|orange|purple|red|silver|teal|white|yellow)$', scopes:'support.constant.color.w3c-standard'} + {match:'^(aliceblue|antiquewhite|aquamarine|azure|beige|bisque|blanchedalmond|blueviolet|brown|burlywood + |cadetblue|chartreuse|chocolate|coral|cornflowerblue|cornsilk|crimson|cyan|darkblue|darkcyan + |darkgoldenrod|darkgray|darkgreen|darkgrey|darkkhaki|darkmagenta|darkolivegreen|darkorange + |darkorchid|darkred|darksalmon|darkseagreen|darkslateblue|darkslategray|darkslategrey|darkturquoise + |darkviolet|deeppink|deepskyblue|dimgray|dimgrey|dodgerblue|firebrick|floralwhite|forestgreen + |gainsboro|ghostwhite|gold|goldenrod|greenyellow|grey|honeydew|hotpink|indianred|indigo|ivory|khaki + |lavender|lavenderblush|lawngreen|lemonchiffon|lightblue|lightcoral|lightcyan|lightgoldenrodyellow + |lightgray|lightgreen|lightgrey|lightpink|lightsalmon|lightseagreen|lightskyblue|lightslategray + |lightslategrey|lightsteelblue|lightyellow|limegreen|linen|magenta|mediumaquamarine|mediumblue + |mediumorchid|mediumpurple|mediumseagreen|mediumslateblue|mediumspringgreen|mediumturquoise + |mediumvioletred|midnightblue|mintcream|mistyrose|moccasin|navajowhite|oldlace|olivedrab|orangered + |orchid|palegoldenrod|palegreen|paleturquoise|palevioletred|papayawhip|peachpuff|peru|pink|plum + |powderblue|rebeccapurple|rosybrown|royalblue|saddlebrown|salmon|sandybrown|seagreen|seashell + |sienna|skyblue|slateblue|slategray|slategrey|snow|springgreen|steelblue|tan|thistle|tomato + |transparent|turquoise|violet|wheat|whitesmoke|yellowgreen)$', scopes: 'support.constant.color.w3c-extended'} + 'support.constant.property-value.css' + ] + + 'feature_name': 'support.type.property-name' + + 'color_value > "#"': 'punctuation.definition.constant.css' + 'id_selector > "#"': 'punctuation.definition.entity.css' + 'selectors > ","': 'punctuation.separator.list.comma.css' + + '"and", "or", "not", "only"': 'keyword.operator' + 'keyword_query': 'keyword.operator' + 'binary_expression > "+"': 'keyword.operator' + 'binary_expression > "-"': 'keyword.operator' + 'binary_expression > "/"': 'keyword.operator' + 'binary_expression > "*"': 
'keyword.operator' diff --git a/packages/language-css/package-lock.json b/packages/language-css/package-lock.json new file mode 100644 index 000000000..abb1afd12 --- /dev/null +++ b/packages/language-css/package-lock.json @@ -0,0 +1,169 @@ +{ + "name": "language-css", + "version": "0.45.4", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "coffee-script": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.11.1.tgz", + "integrity": "sha1-vxxHrWREOg2V0S3ysUfMCk2q1uk=", + "dev": true + }, + "coffeelint": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/coffeelint/-/coffeelint-1.16.2.tgz", + "integrity": "sha512-6mzgOo4zb17WfdrSui/cSUEgQ0AQkW3gXDht+6lHkfkqGUtSYKwGdGcXsDfAyuScVzTlTtKdfwkAlJWfqul7zg==", + "dev": true, + "requires": { + "coffee-script": "~1.11.0", + "glob": "^7.0.6", + "ignore": "^3.0.9", + "optimist": "^0.6.1", + "resolve": "^0.6.3", + "strip-json-comments": "^1.0.2" + } + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "ignore": { + "version": "3.3.10", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", + "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", + "integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=", + "dev": true + }, + "nan": { + "version": "2.14.2", + 
"resolved": "https://registry.npmjs.org/nan/-/nan-2.14.2.tgz", + "integrity": "sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ==" + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "optimist": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", + "dev": true, + "requires": { + "minimist": "~0.0.1", + "wordwrap": "~0.0.2" + } + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "resolve": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-0.6.3.tgz", + "integrity": "sha1-3ZV5gufnNt699TtYpN2RdUV13UY=", + "dev": true + }, + "strip-json-comments": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-1.0.4.tgz", + "integrity": "sha1-HhX7ysl9Pumb8tc7TGVrCCu6+5E=", + "dev": true + }, + "tree-sitter-css": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/tree-sitter-css/-/tree-sitter-css-0.19.0.tgz", + "integrity": "sha512-LYCHS1V2bzeNJr8Mgh60H06qB8NNJyRJVgW1gKCEjcm5S48d8H9xOnrzIlsyLHaXFfnGWCrHJ6jxN6G3s5fJTA==", + "requires": { + "nan": "^2.14.1" + } + }, + "wordwrap": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", + "dev": true + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + } + } +} diff --git a/packages/language-css/package.json b/packages/language-css/package.json new file mode 100644 index 000000000..365f74382 --- /dev/null +++ b/packages/language-css/package.json @@ -0,0 +1,27 @@ +{ + "name": "language-css", + "description": "CSS support in Atom", + "keywords": [ + "tree-sitter" + ], + "version": "0.45.4", + "engines": { + "atom": "*", + "node": "*" + }, + "homepage": "http://atom.github.io/language-css", + "repository": { + "type": "git", + "url": "https://github.com/atom/language-css.git" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/atom/language-css/issues" + }, + "devDependencies": { + "coffeelint": "^1.10.1" + }, + "dependencies": { + "tree-sitter-css": "^0.19.0" + } +} diff --git a/packages/language-css/settings/language-css.cson b/packages/language-css/settings/language-css.cson new file mode 100644 index 000000000..a4beb427a --- /dev/null +++ b/packages/language-css/settings/language-css.cson @@ -0,0 +1,43 @@ +'.source.css': + 'core': + 'useTreeSitterParsers': false + 'editor': + 'commentStart': '/*' + 'commentEnd': '*/' + 'foldEndPattern': '(?<!\\*)\\*\\*/|^\\s*\\}|\\/*\\s*@end\\s*\\*\\/' + 'autocomplete': + 'extraWordCharacters': '-' + 'symbols': + 'selector': + 'selector': '.css.selector' + 'typePriority': 1 + +'.source.css .meta.property-value.css': + 'autocomplete': + 'symbols': + 'builtins': + 'suggestions': [{ + 'type': 'function' + 'snippet': "rgb(${1:255}, ${2:0}, ${3:0})${4:;}$0" + 'rightLabel': 'CSS builtin' + 'description': "Creates a Color from red (0-255), green (0-255), and blue (0-255)." 
+ 'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#rgb()' + }, { + 'type': 'function' + 'snippet': "rgba(${1:255}, ${2:0}, ${3:0}, ${4:0.5})${5:;}$0" + 'rightLabel': 'CSS builtin' + 'description': "Creates a Color from red (0-255), green (0-255), blue (0-255), and alpha (0-1)." + 'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#rgba()' + }, { + 'type': 'function' + 'snippet': "hsl(${1:360}, ${2:100%}, ${3:100%})${4:;}$0" + 'rightLabel': 'CSS builtin' + 'description': "Creates a Color from hue (0-360), saturation (0-100%), and lightness (0-100%)." + 'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#hsl()' + },{ + 'type': 'function' + 'snippet': "hsla(${1:360}, ${2:100%}, ${3:100%}, ${4:0.5})${5:;}$0" + 'rightLabel': 'CSS builtin' + 'description': "Creates a Color from hue (0-360), saturation (0-100%), lightness (0-100%), and alpha (0-1)." + 'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#hsla()' + }] diff --git a/packages/language-css/snippets/language-css.cson b/packages/language-css/snippets/language-css.cson new file mode 100644 index 000000000..4b7b08126 --- /dev/null +++ b/packages/language-css/snippets/language-css.cson @@ -0,0 +1,51 @@ +'.source.css': + '!important': + 'prefix': '!' + 'body': '!important${1:;}$0' + '@charset': + 'prefix': 'charset' + 'body': '@charset "${1:UTF-8}";$0' + 'description': 'Specifies the character encoding used in the style sheet.' + 'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/@charset' + '@font-face': + 'prefix': 'fontface' + 'body': '@font-face {\n\t$1\n}$0' + 'description': 'Specify online fonts to display text on their web pages.' + 'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face' + '@import': + 'prefix': 'import' + 'body': '@import "$0";' + 'description': 'Import style rules from other style sheets.' + 'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/@import' + '@keyframes': + 'prefix': 'keyframes' + 'body': '@keyframes ${1:identifier} {\n\t$2\n}$0' + 'description': 'Specifies a CSS animation.' + 'descriptionMoreURL': 'https://developer.mozilla.org/en/docs/Web/CSS/@keyframes' + '@media': + 'prefix': 'media' + 'body': '@media ${1:query} {\n\t$2\n}$0' + 'description': 'A set of nested statements with a condition defined by a media query.' + 'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/@media' + '@supports': + 'prefix': 'supports' + 'body': '@supports ${1:condition} {\n\t$2\n}$0' + 'description': 'A set of nested statements with a condition defined by a supports condition.' + 'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/@supports' + +# We dont want the snippets to show in properties +'.source.css .meta.property-list': + '!important': + 'prefix': '!' 
+ '@charset': + 'prefix': 'charset' + '@font-face': + 'prefix': 'fontface' + '@import': + 'prefix': 'import' + '@keyframes': + 'prefix': 'keyframes' + '@media': + 'prefix': 'media' + '@supports': + 'prefix': 'supports' diff --git a/packages/language-css/spec/css-spec.coffee b/packages/language-css/spec/css-spec.coffee new file mode 100644 index 000000000..e68cd02dd --- /dev/null +++ b/packages/language-css/spec/css-spec.coffee @@ -0,0 +1,3638 @@ +describe 'CSS grammar', -> + grammar = null + + beforeEach -> + atom.config.set('core.useTreeSitterParsers', false) + + waitsForPromise -> + atom.packages.activatePackage('language-css') + + runs -> + grammar = atom.grammars.grammarForScopeName('source.css') + + it 'parses the grammar', -> + expect(grammar).toBeTruthy() + expect(grammar.scopeName).toBe 'source.css' + + describe 'selectors', -> + it 'tokenizes type selectors', -> + {tokens} = grammar.tokenizeLine 'p {}' + expect(tokens[0]).toEqual value: 'p', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + + it 'tokenizes the universal selector', -> + {tokens} = grammar.tokenizeLine '*' + expect(tokens[0]).toEqual value: '*', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.wildcard.css'] + + it 'tokenises combinators', -> + {tokens} = grammar.tokenizeLine 'a > b + * ~ :not(.nah)' + expect(tokens[2]).toEqual value: '>', scopes: ['source.css', 'meta.selector.css', 'keyword.operator.combinator.css'] + expect(tokens[6]).toEqual value: '+', scopes: ['source.css', 'meta.selector.css', 'keyword.operator.combinator.css'] + expect(tokens[10]).toEqual value: '~', scopes: ['source.css', 'meta.selector.css', 'keyword.operator.combinator.css'] + + it 'highlights deprecated combinators', -> + {tokens} = grammar.tokenizeLine '.sooo /deep/ >>>_.>>>' + expect(tokens[3]).toEqual value: '/deep/', scopes: ['source.css', 'invalid.deprecated.combinator.css'] + expect(tokens[5]).toEqual value: '>>>', scopes: ['source.css', 'invalid.deprecated.combinator.css'] + + it 'tokenizes complex selectors', -> + {tokens} = grammar.tokenizeLine '[disabled], [disabled] + p' + expect(tokens[0]).toEqual value: '[', scopes: ["source.css", "meta.selector.css", "meta.attribute-selector.css", "punctuation.definition.entity.begin.bracket.square.css"] + expect(tokens[1]).toEqual value: 'disabled', scopes: ["source.css", "meta.selector.css", "meta.attribute-selector.css", "entity.other.attribute-name.css"] + expect(tokens[2]).toEqual value: ']', scopes: ["source.css", "meta.selector.css", "meta.attribute-selector.css", "punctuation.definition.entity.end.bracket.square.css"] + expect(tokens[3]).toEqual value: ',', scopes: ["source.css", "meta.selector.css", "punctuation.separator.list.comma.css"] + expect(tokens[5]).toEqual value: '[', scopes: ["source.css", "meta.selector.css", "meta.attribute-selector.css", "punctuation.definition.entity.begin.bracket.square.css"] + expect(tokens[6]).toEqual value: 'disabled', scopes: ["source.css", "meta.selector.css", "meta.attribute-selector.css", "entity.other.attribute-name.css"] + expect(tokens[7]).toEqual value: ']', scopes: ["source.css", "meta.selector.css", "meta.attribute-selector.css", "punctuation.definition.entity.end.bracket.square.css"] + expect(tokens[9]).toEqual value: '+', scopes: ["source.css", "meta.selector.css", "keyword.operator.combinator.css"] + expect(tokens[11]).toEqual value: 'p', scopes: ["source.css", "meta.selector.css", "entity.name.tag.css"] + + lines = grammar.tokenizeLines """ + [disabled]:not(:first-child)::before:hover + ~ div.object + + 
#id.thing:hover > strong ~ p::before, + a::last-of-type,/*Comment*/::selection > html[lang^=en-AU], + *>em.i.ly[data-name|="Life"] { } + """ + expect(lines[0][0]).toEqual value: '[', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css'] + expect(lines[0][1]).toEqual value: 'disabled', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(lines[0][2]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + expect(lines[0][3]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css'] + expect(lines[0][4]).toEqual value: 'not', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(lines[0][5]).toEqual value: '(', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[0][6]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css'] + expect(lines[0][7]).toEqual value: 'first-child', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(lines[0][8]).toEqual value: ')', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[0][9]).toEqual value: '::', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css', 'punctuation.definition.entity.css'] + expect(lines[0][10]).toEqual value: 'before', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css'] + expect(lines[0][11]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css'] + expect(lines[0][12]).toEqual value: 'hover', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(lines[1][1]).toEqual value: '~', scopes: ['source.css', 'meta.selector.css', 'keyword.operator.combinator.css'] + expect(lines[1][3]).toEqual value: 'div', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[1][4]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css'] + expect(lines[1][5]).toEqual value: 'object', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css'] + expect(lines[2][1]).toEqual value: '+', scopes: ['source.css', 'meta.selector.css', 'keyword.operator.combinator.css'] + expect(lines[2][3]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css', 'punctuation.definition.entity.css'] + expect(lines[2][4]).toEqual value: 'id', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css'] + expect(lines[2][5]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css'] + expect(lines[2][6]).toEqual value: 'thing', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css'] + expect(lines[2][7]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 
'punctuation.definition.entity.css'] + expect(lines[2][8]).toEqual value: 'hover', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(lines[2][10]).toEqual value: '>', scopes: ['source.css', 'meta.selector.css', 'keyword.operator.combinator.css'] + expect(lines[2][12]).toEqual value: 'strong', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[2][14]).toEqual value: '~', scopes: ['source.css', 'meta.selector.css', 'keyword.operator.combinator.css'] + expect(lines[2][16]).toEqual value: 'p', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[2][17]).toEqual value: '::', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css', 'punctuation.definition.entity.css'] + expect(lines[2][18]).toEqual value: 'before', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css'] + expect(lines[2][19]).toEqual value: ',', scopes: ['source.css', 'meta.selector.css', 'punctuation.separator.list.comma.css'] + expect(lines[3][0]).toEqual value: 'a', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[3][1]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css'] + expect(lines[3][2]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'invalid.illegal.colon.css'] + expect(lines[3][3]).toEqual value: 'last-of-type', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(lines[3][4]).toEqual value: ',', scopes: ['source.css', 'meta.selector.css', 'punctuation.separator.list.comma.css'] + expect(lines[3][5]).toEqual value: '/*', scopes: ['source.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[3][6]).toEqual value: 'Comment', scopes: ['source.css', 'comment.block.css'] + expect(lines[3][7]).toEqual value: '*/', scopes: ['source.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[3][8]).toEqual value: '::', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css', 'punctuation.definition.entity.css'] + expect(lines[3][9]).toEqual value: 'selection', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css'] + expect(lines[3][11]).toEqual value: '>', scopes: ['source.css', 'meta.selector.css', 'keyword.operator.combinator.css'] + expect(lines[3][13]).toEqual value: 'html', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[3][14]).toEqual value: '[', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css'] + expect(lines[3][15]).toEqual value: 'lang', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(lines[3][16]).toEqual value: '^=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(lines[3][17]).toEqual value: 'en-AU', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.unquoted.attribute-value.css'] + expect(lines[3][18]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + 
expect(lines[3][19]).toEqual value: ',', scopes: ['source.css', 'meta.selector.css', 'punctuation.separator.list.comma.css'] + expect(lines[4][0]).toEqual value: '*', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.wildcard.css'] + expect(lines[4][1]).toEqual value: '>', scopes: ['source.css', 'meta.selector.css', 'keyword.operator.combinator.css'] + expect(lines[4][2]).toEqual value: 'em', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[4][3]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css'] + expect(lines[4][4]).toEqual value: 'i', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css'] + expect(lines[4][5]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css'] + expect(lines[4][6]).toEqual value: 'ly', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css'] + expect(lines[4][7]).toEqual value: '[', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css'] + expect(lines[4][8]).toEqual value: 'data-name', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(lines[4][9]).toEqual value: '|=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(lines[4][10]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(lines[4][11]).toEqual value: 'Life', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css'] + expect(lines[4][12]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(lines[4][13]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + expect(lines[4][15]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[4][17]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + describe 'custom elements (as type selectors)', -> + it 'only tokenizes identifiers beginning with [a-z]', -> + {tokens} = grammar.tokenizeLine 'pearl-1941 1941-pearl -pearl-1941' + expect(tokens[0]).toEqual value: 'pearl-1941', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.custom.css'] + expect(tokens[1]).toEqual value: ' 1941-pearl -pearl-1941', scopes: ['source.css', 'meta.selector.css'] + + it 'tokenizes custom elements containing non-ASCII letters', -> + {tokens} = grammar.tokenizeLine 'pokémon-ピカチュウ' + expect(tokens[0]).toEqual value: 'pokémon-ピカチュウ', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.custom.css'] + + it 'does not tokenize identifiers containing [A-Z]', -> + {tokens} = grammar.tokenizeLine 'Basecamp-schedule basecamp-Schedule' + expect(tokens[0]).toEqual value: 'Basecamp-schedule basecamp-Schedule', scopes: ['source.css', 'meta.selector.css'] + + it 'does not tokenize identifiers containing no hyphens', -> + {tokens} = grammar.tokenizeLine 
'halo_night' + expect(tokens[0]).toEqual value: 'halo_night', scopes: ['source.css', 'meta.selector.css'] + + it 'does not tokenise identifiers following an @ symbol', -> + {tokens} = grammar.tokenizeLine('@some-weird-new-feature') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.header.css', 'keyword.control.at-rule.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'some-weird-new-feature', scopes: ['source.css', 'meta.at-rule.header.css', 'keyword.control.at-rule.css'] + + it 'does not tokenise identifiers in unfamiliar functions', -> + {tokens} = grammar.tokenizeLine('some-edgy-new-function()') + expect(tokens[0]).toEqual value: 'some-edgy-new-function(', scopes: ['source.css', 'meta.selector.css'] + expect(tokens[1]).toEqual value: ')', scopes: ['source.css'] + + describe 'attribute selectors', -> + it 'tokenizes attribute selectors without values', -> + {tokens} = grammar.tokenizeLine '[title]' + expect(tokens[0]).toEqual value: '[', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css'] + expect(tokens[1]).toEqual value: 'title', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(tokens[2]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + it 'tokenizes attribute selectors with identifier values', -> + {tokens} = grammar.tokenizeLine '[hreflang|=fr]' + expect(tokens[0]).toEqual value: '[', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css'] + expect(tokens[1]).toEqual value: 'hreflang', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(tokens[2]).toEqual value: '|=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(tokens[3]).toEqual value: 'fr', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.unquoted.attribute-value.css'] + expect(tokens[4]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + it 'tokenizes attribute selectors with string values', -> + {tokens} = grammar.tokenizeLine '[href^="http://www.w3.org/"]' + expect(tokens[0]).toEqual value: '[', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css'] + expect(tokens[1]).toEqual value: 'href', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(tokens[2]).toEqual value: '^=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(tokens[3]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[4]).toEqual value: 'http://www.w3.org/', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css'] + expect(tokens[5]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[6]).toEqual value: 
']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + it 'tokenizes CSS qualified attribute names with wildcard prefix', -> + {tokens} = grammar.tokenizeLine '[*|title]' + expect(tokens[0]).toEqual value: '[', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css'] + expect(tokens[1]).toEqual value: '*', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.namespace-prefix.css'] + expect(tokens[2]).toEqual value: '|', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.separator.css'] + expect(tokens[3]).toEqual value: 'title', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(tokens[4]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + it 'tokenizes CSS qualified attribute names with namespace prefix', -> + {tokens} = grammar.tokenizeLine '[marvel|origin=radiation]' + expect(tokens[0]).toEqual value: '[', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css'] + expect(tokens[1]).toEqual value: 'marvel', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.namespace-prefix.css'] + expect(tokens[2]).toEqual value: '|', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.separator.css'] + expect(tokens[3]).toEqual value: 'origin', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(tokens[4]).toEqual value: '=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(tokens[5]).toEqual value: 'radiation', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.unquoted.attribute-value.css'] + expect(tokens[6]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + it 'tokenizes CSS qualified attribute names without namespace prefix', -> + {tokens} = grammar.tokenizeLine '[|data-hp="75"]' + expect(tokens[0]).toEqual value: '[', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css'] + expect(tokens[1]).toEqual value: '|', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.separator.css'] + expect(tokens[2]).toEqual value: 'data-hp', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(tokens[3]).toEqual value: '=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(tokens[4]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[5]).toEqual value: '75', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css'] + expect(tokens[6]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 
'punctuation.definition.string.end.css'] + expect(tokens[7]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + it 'tokenises compound ID/attribute selectors', -> + {tokens} = grammar.tokenizeLine('#div[id="0"]{ }') + expect(tokens[0]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css', 'punctuation.definition.entity.css'] + expect(tokens[1]).toEqual value: 'div', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css'] + expect(tokens[2]).toEqual value: '[', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css'] + expect(tokens[3]).toEqual value: 'id', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(tokens[8]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + {tokens} = grammar.tokenizeLine('.bar#div[id="0"]') + expect(tokens[0]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css'] + expect(tokens[1]).toEqual value: 'bar', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css'] + expect(tokens[2]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css', 'punctuation.definition.entity.css'] + expect(tokens[3]).toEqual value: 'div', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css'] + expect(tokens[4]).toEqual value: '[', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css'] + expect(tokens[5]).toEqual value: 'id', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + + it 'tokenises compound class/attribute selectors', -> + {tokens} = grammar.tokenizeLine('.div[id="0"]{ }') + expect(tokens[0]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css'] + expect(tokens[1]).toEqual value: 'div', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css'] + expect(tokens[2]).toEqual value: '[', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css'] + expect(tokens[3]).toEqual value: 'id', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(tokens[8]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + {tokens} = grammar.tokenizeLine('#bar.div[id]') + expect(tokens[0]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css', 'punctuation.definition.entity.css'] + expect(tokens[1]).toEqual value: 'bar', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css'] + expect(tokens[2]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css'] + expect(tokens[3]).toEqual value: 'div', scopes: ['source.css', 'meta.selector.css', 
'entity.other.attribute-name.class.css'] + expect(tokens[4]).toEqual value: '[', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css'] + expect(tokens[5]).toEqual value: 'id', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(tokens[6]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + it 'allows whitespace to be inserted between tokens', -> + {tokens} = grammar.tokenizeLine('span[ er|lang |= "%%" ]') + expect(tokens[1]).toEqual value: '[', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css'] + expect(tokens[2]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css'] + expect(tokens[3]).toEqual value: 'er', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.namespace-prefix.css'] + expect(tokens[4]).toEqual value: '|', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.separator.css'] + expect(tokens[5]).toEqual value: 'lang', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(tokens[6]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css'] + expect(tokens[7]).toEqual value: '|=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(tokens[8]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css'] + expect(tokens[9]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[10]).toEqual value: '%%', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css'] + expect(tokens[11]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[12]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css'] + expect(tokens[13]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + it 'tokenises escape sequences inside attribute selectors', -> + {tokens} = grammar.tokenizeLine('a[name\\[0\\]="value"]') + expect(tokens[2]).toEqual value: 'name', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(tokens[3]).toEqual value: '\\[', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css', 'constant.character.escape.css'] + expect(tokens[4]).toEqual value: '0', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(tokens[5]).toEqual value: '\\]', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css', 'constant.character.escape.css'] + expect(tokens[6]).toEqual value: '=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(tokens[10]).toEqual 
value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + it 'tokenises escape sequences inside namespace prefixes', -> + {tokens} = grammar.tokenizeLine('a[name\\ space|Get\\ It\\?="kek"]') + expect(tokens[2]).toEqual value: 'name', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.namespace-prefix.css'] + expect(tokens[3]).toEqual value: '\\ ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.namespace-prefix.css', 'constant.character.escape.css'] + expect(tokens[4]).toEqual value: 'space', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.namespace-prefix.css'] + expect(tokens[5]).toEqual value: '|', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.separator.css'] + expect(tokens[6]).toEqual value: 'Get', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(tokens[7]).toEqual value: '\\ ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css', 'constant.character.escape.css'] + expect(tokens[8]).toEqual value: 'It', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(tokens[9]).toEqual value: '\\?', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css', 'constant.character.escape.css'] + expect(tokens[10]).toEqual value: '=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(tokens[14]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + it 'tokenises comments inside attribute selectors', -> + {tokens} = grammar.tokenizeLine('span[/*]*/lang]') + expect(tokens[0]).toEqual value: 'span', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[1]).toEqual value: '[', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css'] + expect(tokens[2]).toEqual value: '/*', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[3]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css'] + expect(tokens[4]).toEqual value: '*/', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[5]).toEqual value: 'lang', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(tokens[6]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + it 'tokenises quoted strings in attribute selectors', -> + {tokens} = grammar.tokenizeLine('a[href^="#"] a[href^= "#"] a[href^="#" ]') + expect(tokens[4]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[5]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 
'meta.attribute-selector.css', 'string.quoted.double.css'] + expect(tokens[6]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[12]).toEqual value: '^=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(tokens[13]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css'] + expect(tokens[14]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[15]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css'] + expect(tokens[16]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[23]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[24]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css'] + expect(tokens[25]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[26]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css'] + expect(tokens[27]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + {tokens} = grammar.tokenizeLine("a[href^='#'] a[href^= '#'] a[href^='#' ]") + expect(tokens[4]).toEqual value: "'", scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.single.css', 'punctuation.definition.string.begin.css'] + expect(tokens[5]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.single.css'] + expect(tokens[6]).toEqual value: "'", scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.single.css', 'punctuation.definition.string.end.css'] + expect(tokens[12]).toEqual value: '^=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(tokens[13]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css'] + expect(tokens[14]).toEqual value: "'", scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.single.css', 'punctuation.definition.string.begin.css'] + expect(tokens[15]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.single.css'] + expect(tokens[16]).toEqual value: "'", scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.single.css', 'punctuation.definition.string.end.css'] + expect(tokens[23]).toEqual value: "'", scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.single.css', 'punctuation.definition.string.begin.css'] + expect(tokens[24]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.single.css'] + 
expect(tokens[25]).toEqual value: "'", scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.single.css', 'punctuation.definition.string.end.css'] + expect(tokens[26]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css'] + expect(tokens[27]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + it 'tokenises unquoted strings in attribute selectors', -> + {tokens} = grammar.tokenizeLine('span[class~=Java]') + expect(tokens[3]).toEqual value: '~=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(tokens[4]).toEqual value: 'Java', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.unquoted.attribute-value.css'] + expect(tokens[5]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + {tokens} = grammar.tokenizeLine('span[class^= 0xDEADCAFE=|~BEEFBABE ]') + expect(tokens[3]).toEqual value: '^=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(tokens[4]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css'] + expect(tokens[5]).toEqual value: '0xDEADCAFE=|~BEEFBABE', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.unquoted.attribute-value.css'] + expect(tokens[6]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css'] + expect(tokens[7]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + it 'tokenises escape sequences in unquoted strings', -> + {tokens} = grammar.tokenizeLine('a[name\\[0\\]=a\\BAD\\AF\\]a\\ i] {}') + expect(tokens[6]).toEqual value: '=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(tokens[7]).toEqual value: 'a', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.unquoted.attribute-value.css'] + expect(tokens[8]).toEqual value: '\\BAD', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.unquoted.attribute-value.css', 'constant.character.escape.codepoint.css'] + expect(tokens[9]).toEqual value: '\\AF', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.unquoted.attribute-value.css', 'constant.character.escape.codepoint.css'] + expect(tokens[10]).toEqual value: '\\]', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.unquoted.attribute-value.css', 'constant.character.escape.css'] + expect(tokens[11]).toEqual value: 'a', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.unquoted.attribute-value.css'] + expect(tokens[12]).toEqual value: '\\ ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.unquoted.attribute-value.css', 'constant.character.escape.css'] + expect(tokens[13]).toEqual value: 'i', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.unquoted.attribute-value.css'] + expect(tokens[14]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 
'punctuation.definition.entity.end.bracket.square.css'] + expect(tokens[16]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + + it 'tokenises the ignore-case modifier at the end of a selector', -> + {tokens} = grammar.tokenizeLine('a[attr=val i] a[attr="val" i] a[attr=\'val\'I] a[val^= \'"\'i] a[attr= i] a[attr= i i]') + expect(tokens[6]).toEqual value: 'i', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'storage.modifier.ignore-case.css'] + expect(tokens[7]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + expect(tokens[16]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css'] + expect(tokens[17]).toEqual value: 'i', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'storage.modifier.ignore-case.css'] + expect(tokens[26]).toEqual value: "'", scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.single.css', 'punctuation.definition.string.end.css'] + expect(tokens[27]).toEqual value: 'I', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'storage.modifier.ignore-case.css'] + expect(tokens[28]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + expect(tokens[34]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css'] + expect(tokens[35]).toEqual value: "'", scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.single.css', 'punctuation.definition.string.begin.css'] + expect(tokens[36]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.single.css'] + expect(tokens[37]).toEqual value: "'", scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.single.css', 'punctuation.definition.string.end.css'] + expect(tokens[38]).toEqual value: 'i', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'storage.modifier.ignore-case.css'] + expect(tokens[39]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + expect(tokens[44]).toEqual value: '=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(tokens[45]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css'] + expect(tokens[46]).toEqual value: 'i', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.unquoted.attribute-value.css'] + expect(tokens[47]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + expect(tokens[52]).toEqual value: '=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(tokens[53]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css'] + expect(tokens[54]).toEqual value: 'i', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.unquoted.attribute-value.css'] + expect(tokens[55]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css', 
'meta.attribute-selector.css'] + expect(tokens[56]).toEqual value: 'i', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'storage.modifier.ignore-case.css'] + expect(tokens[57]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + it 'tokenises attribute selectors spanning multiple lines', -> + lines = grammar.tokenizeLines """ + span[ + \\x20{2} + ns|lang/**/ + |= + "pt"] + """ + expect(lines[0][0]).toEqual value: 'span', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[0][1]).toEqual value: '[', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css'] + expect(lines[1][0]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css'] + expect(lines[2][1]).toEqual value: 'ns', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.namespace-prefix.css'] + expect(lines[2][2]).toEqual value: '|', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.separator.css'] + expect(lines[2][3]).toEqual value: 'lang', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(lines[2][4]).toEqual value: '/*', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[2][5]).toEqual value: '*/', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[3][1]).toEqual value: '|=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(lines[4][0]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(lines[4][1]).toEqual value: 'pt', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css'] + expect(lines[4][2]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(lines[4][3]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + lines = grammar.tokenizeLines """ + span[/*=== + ==|span[/*} + ====*/*|lang/*]=*/~=/*"|"*/"en-AU"/* + | + */ + i] + """ + expect(lines[0][2]).toEqual value: '/*', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[0][3]).toEqual value: '===', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css'] + expect(lines[1][0]).toEqual value: '==|span[/*}', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css'] + expect(lines[2][0]).toEqual value: '====', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css'] + expect(lines[2][1]).toEqual value: '*/', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[2][2]).toEqual value: '*', scopes: 
['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.namespace-prefix.css'] + expect(lines[2][3]).toEqual value: '|', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.separator.css'] + expect(lines[2][4]).toEqual value: 'lang', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(lines[2][5]).toEqual value: '/*', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[2][6]).toEqual value: ']=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css'] + expect(lines[2][7]).toEqual value: '*/', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[2][8]).toEqual value: '~=', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(lines[2][9]).toEqual value: '/*', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[2][10]).toEqual value: '"|"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css'] + expect(lines[2][11]).toEqual value: '*/', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[2][12]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(lines[2][13]).toEqual value: 'en-AU', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css'] + expect(lines[2][14]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(lines[2][15]).toEqual value: '/*', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[3][0]).toEqual value: ' |', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css'] + expect(lines[4][0]).toEqual value: '*/', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[5][0]).toEqual value: 'i', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'storage.modifier.ignore-case.css'] + expect(lines[5][1]).toEqual value: ']', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + + describe 'class selectors', -> + it 'tokenizes class selectors containing non-ASCII letters', -> + {tokens} = grammar.tokenizeLine '.étendard' + expect(tokens[0]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css'] + expect(tokens[1]).toEqual value: 'étendard', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css'] + + {tokens} = grammar.tokenizeLine '.スポンサー' + expect(tokens[0]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 
'punctuation.definition.entity.css']
+        expect(tokens[1]).toEqual value: 'スポンサー', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css']
+
+      it 'tokenizes a class selector consisting of two hyphens', ->
+        {tokens} = grammar.tokenizeLine '.--'
+        expect(tokens[0]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']
+        expect(tokens[1]).toEqual value: '--', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css']
+
+      it 'tokenizes class selectors consisting of one (valid) character', ->
+        {tokens} = grammar.tokenizeLine '._'
+        expect(tokens[0]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']
+        expect(tokens[1]).toEqual value: '_', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css']
+
+      it 'tokenises class selectors starting with an escape sequence', ->
+        {tokens} = grammar.tokenizeLine '.\\33\\44-model {'
+        expect(tokens[0]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']
+        expect(tokens[1]).toEqual value: '\\33', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'constant.character.escape.codepoint.css']
+        expect(tokens[2]).toEqual value: '\\44', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'constant.character.escape.codepoint.css']
+        expect(tokens[3]).toEqual value: '-model', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css']
+        expect(tokens[5]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
+
+      it 'tokenises class selectors ending with an escape sequence', ->
+        {tokens} = grammar.tokenizeLine '.la\\{tex\\} {'
+        expect(tokens[0]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']
+        expect(tokens[1]).toEqual value: 'la', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css']
+        expect(tokens[2]).toEqual value: '\\{', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'constant.character.escape.css']
+        expect(tokens[3]).toEqual value: 'tex', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css']
+        expect(tokens[4]).toEqual value: '\\}', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'constant.character.escape.css']
+        expect(tokens[6]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
+
+      it 'marks a class invalid if it contains unescaped ASCII punctuation or symbols other than "-" and "_"', ->
+        {tokens} = grammar.tokenizeLine '.B&W{'
+        expect(tokens[0]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'invalid.illegal.bad-identifier.css', 'punctuation.definition.entity.css']
+        expect(tokens[1]).toEqual value: 'B&W', scopes: ['source.css', 'meta.selector.css', 'invalid.illegal.bad-identifier.css']
+        expect(tokens[2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
+
+      it 'marks a class invalid if it starts with ASCII digits ([0-9])', ->
+        {tokens} = grammar.tokenizeLine '.666{'
+        expect(tokens[0]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'invalid.illegal.bad-identifier.css', 'punctuation.definition.entity.css']
+        expect(tokens[1]).toEqual value: '666', scopes: ['source.css', 'meta.selector.css', 'invalid.illegal.bad-identifier.css']
+        expect(tokens[2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
+
+      it 'marks a class invalid if it starts with "-" followed by ASCII digits', ->
+        {tokens} = grammar.tokenizeLine '.-911-{'
+        expect(tokens[0]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'invalid.illegal.bad-identifier.css', 'punctuation.definition.entity.css']
+        expect(tokens[1]).toEqual value: '-911-', scopes: ['source.css', 'meta.selector.css', 'invalid.illegal.bad-identifier.css']
+        expect(tokens[2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
+
+      it 'marks a class invalid if it consists of only one hyphen', ->
+        {tokens} = grammar.tokenizeLine '.-{'
+        expect(tokens[0]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'invalid.illegal.bad-identifier.css', 'punctuation.definition.entity.css']
+        expect(tokens[1]).toEqual value: '-', scopes: ['source.css', 'meta.selector.css', 'invalid.illegal.bad-identifier.css']
+        expect(tokens[2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
+
+    describe 'id selectors', ->
+      it 'tokenizes id selectors consisting of ASCII letters', ->
+        {tokens} = grammar.tokenizeLine '#unicorn'
+        expect(tokens[0]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css', 'punctuation.definition.entity.css']
+        expect(tokens[1]).toEqual value: 'unicorn', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css']
+
+      it 'tokenizes id selectors containing non-ASCII letters', ->
+        {tokens} = grammar.tokenizeLine '#洪荒之力'
+        expect(tokens[0]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css', 'punctuation.definition.entity.css']
+        expect(tokens[1]).toEqual value: '洪荒之力', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css']
+
+      it 'tokenizes id selectors containing [0-9], "-", or "_"', ->
+        {tokens} = grammar.tokenizeLine '#_zer0-day'
+        expect(tokens[0]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css', 'punctuation.definition.entity.css']
+        expect(tokens[1]).toEqual value: '_zer0-day', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css']
+
+      it 'tokenizes id selectors beginning with two hyphens', ->
+        {tokens} = grammar.tokenizeLine '#--d3bug--'
+        expect(tokens[0]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css', 'punctuation.definition.entity.css']
+        expect(tokens[1]).toEqual value: '--d3bug--', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css']
+
+      it 'marks an id invalid if it contains ASCII punctuation or symbols other than "-" and "_"', ->
+        {tokens} = grammar.tokenizeLine '#sort!{'
+        expect(tokens[0]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'invalid.illegal.bad-identifier.css', 'punctuation.definition.entity.css']
+        expect(tokens[1]).toEqual value: 'sort!', scopes: ['source.css',
'meta.selector.css', 'invalid.illegal.bad-identifier.css'] + expect(tokens[2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + + it 'marks an id invalid if it starts with ASCII digits ([0-9])', -> + {tokens} = grammar.tokenizeLine '#666{' + expect(tokens[0]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'invalid.illegal.bad-identifier.css', 'punctuation.definition.entity.css'] + expect(tokens[1]).toEqual value: '666', scopes: ['source.css', 'meta.selector.css', 'invalid.illegal.bad-identifier.css'] + expect(tokens[2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + + it 'marks an id invalid if it starts with "-" followed by ASCII digits', -> + {tokens} = grammar.tokenizeLine '#-911-{' + expect(tokens[0]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'invalid.illegal.bad-identifier.css', 'punctuation.definition.entity.css'] + expect(tokens[1]).toEqual value: '-911-', scopes: ['source.css', 'meta.selector.css', 'invalid.illegal.bad-identifier.css'] + expect(tokens[2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + + it 'marks an id invalid if it consists of one hyphen only', -> + {tokens} = grammar.tokenizeLine '#-{' + expect(tokens[0]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'invalid.illegal.bad-identifier.css', 'punctuation.definition.entity.css'] + expect(tokens[1]).toEqual value: '-', scopes: ['source.css', 'meta.selector.css', 'invalid.illegal.bad-identifier.css'] + expect(tokens[2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + + it 'tokenises ID selectors starting with an escape sequence', -> + {tokens} = grammar.tokenizeLine '#\\33\\44-model {' + expect(tokens[0]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css', 'punctuation.definition.entity.css'] + expect(tokens[1]).toEqual value: '\\33', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css', 'constant.character.escape.codepoint.css'] + expect(tokens[2]).toEqual value: '\\44', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css', 'constant.character.escape.codepoint.css'] + expect(tokens[3]).toEqual value: '-model', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css'] + expect(tokens[5]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + + it 'tokenises ID selectors ending with an escape sequence', -> + {tokens} = grammar.tokenizeLine '#la\\{tex\\} {' + expect(tokens[0]).toEqual value: '#', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css', 'punctuation.definition.entity.css'] + expect(tokens[1]).toEqual value: 'la', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css'] + expect(tokens[2]).toEqual value: '\\{', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css', 'constant.character.escape.css'] + expect(tokens[3]).toEqual value: 'tex', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css'] + expect(tokens[4]).toEqual value: '\\}', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.id.css', 
'constant.character.escape.css'] + expect(tokens[6]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + + describe 'namespace prefixes', -> + it 'tokenises arbitrary namespace prefixes', -> + {tokens} = grammar.tokenizeLine('foo|h1 { }') + expect(tokens[0]).toEqual value: 'foo', scopes: ['source.css', 'meta.selector.css', 'entity.other.namespace-prefix.css'] + expect(tokens[1]).toEqual value: '|', scopes: ['source.css', 'meta.selector.css', 'punctuation.separator.css'] + expect(tokens[2]).toEqual value: 'h1', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[3]).toEqual value: ' ', scopes: ['source.css'] + expect(tokens[4]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[6]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it 'tokenises anonymous namespace prefixes', -> + {tokens} = grammar.tokenizeLine('*|abbr {}') + expect(tokens[0]).toEqual value: '*', scopes: ['source.css', 'meta.selector.css', 'entity.other.namespace-prefix.css'] + expect(tokens[1]).toEqual value: '|', scopes: ['source.css', 'meta.selector.css', 'punctuation.separator.css'] + expect(tokens[2]).toEqual value: 'abbr', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[3]).toEqual value: ' ', scopes: ['source.css'] + expect(tokens[4]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[5]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + {tokens} = grammar.tokenizeLine('*|* {}') + expect(tokens[0]).toEqual value: '*', scopes: ['source.css', 'meta.selector.css', 'entity.other.namespace-prefix.css'] + expect(tokens[1]).toEqual value: '|', scopes: ['source.css', 'meta.selector.css', 'punctuation.separator.css'] + expect(tokens[2]).toEqual value: '*', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.wildcard.css'] + expect(tokens[3]).toEqual value: ' ', scopes: ['source.css'] + expect(tokens[4]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[5]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + {tokens} = grammar.tokenizeLine('foo|* { }') + expect(tokens[0]).toEqual value: 'foo', scopes: ['source.css', 'meta.selector.css', 'entity.other.namespace-prefix.css'] + expect(tokens[1]).toEqual value: '|', scopes: ['source.css', 'meta.selector.css', 'punctuation.separator.css'] + expect(tokens[2]).toEqual value: '*', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.wildcard.css'] + expect(tokens[4]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[6]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + {tokens} = grammar.tokenizeLine('|[svg|attr=name]{}') + expect(tokens[0]).toEqual value: '|', scopes: ['source.css', 'meta.selector.css', 'punctuation.separator.css'] + expect(tokens[1]).toEqual value: '[', scopes: ['source.css', 'meta.selector.css', 
'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css']
+        expect(tokens[2]).toEqual value: 'svg', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.namespace-prefix.css']
+        expect(tokens[3]).toEqual value: '|', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.separator.css']
+        expect(tokens[4]).toEqual value: 'attr', scopes: ['source.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css']
+
+      it 'tokenises the "no-namespace" prefix', ->
+        {tokens} = grammar.tokenizeLine('|h1 { }')
+        expect(tokens[0]).toEqual value: '|', scopes: ['source.css', 'meta.selector.css', 'punctuation.separator.css']
+        expect(tokens[1]).toEqual value: 'h1', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css']
+        expect(tokens[3]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
+        expect(tokens[5]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']
+
+      it "doesn't tokenise prefixes without a selector", ->
+        {tokens} = grammar.tokenizeLine('*| { }')
+        expect(tokens[0]).toEqual value: '*', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.wildcard.css']
+        expect(tokens[1]).toEqual value: '|', scopes: ['source.css', 'meta.selector.css']
+        expect(tokens[2]).toEqual value: ' ', scopes: ['source.css']
+        expect(tokens[3]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
+        expect(tokens[5]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']
+
+        {tokens} = grammar.tokenizeLine('*|{ }')
+        expect(tokens[0]).toEqual value: '*', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.wildcard.css']
+        expect(tokens[1]).toEqual value: '|', scopes: ['source.css', 'meta.selector.css']
+        expect(tokens[2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
+        expect(tokens[4]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']
+
+  describe 'at-rules', ->
+    describe '@charset', ->
+      # NB: \A won't match if "tokenizeLine" is used; "tokenizeLines" is used here to circumvent this limitation
+      it 'tokenises @charset rules at the start of a file', ->
+        lines = grammar.tokenizeLines '@charset "US-ASCII";'
+        expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.charset.css', 'keyword.control.at-rule.charset.css', 'punctuation.definition.keyword.css']
+        expect(lines[0][1]).toEqual value: 'charset', scopes: ['source.css', 'meta.at-rule.charset.css', 'keyword.control.at-rule.charset.css']
+        expect(lines[0][2]).toEqual value: ' ', scopes: ['source.css', 'meta.at-rule.charset.css']
+        expect(lines[0][3]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.charset.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css']
+        expect(lines[0][4]).toEqual value: 'US-ASCII', scopes: ['source.css', 'meta.at-rule.charset.css', 'string.quoted.double.css']
+        expect(lines[0][5]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.charset.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css']
+        expect(lines[0][6]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.charset.css', 'punctuation.terminator.rule.css']
+
+        lines = grammar.tokenizeLines('/* Not the first line */\n@charset "UTF-8";')
+        expect(lines[0][0]).toEqual value: '/*', scopes: ['source.css', 'comment.block.css', 'punctuation.definition.comment.begin.css']
+        expect(lines[0][1]).toEqual value: ' Not the first line ', scopes: ['source.css', 'comment.block.css']
+        expect(lines[0][2]).toEqual value: '*/', scopes: ['source.css', 'comment.block.css', 'punctuation.definition.comment.end.css']
+        expect(lines[1][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.header.css', 'keyword.control.at-rule.css', 'punctuation.definition.keyword.css']
+        expect(lines[1][1]).toEqual value: 'charset', scopes: ['source.css', 'meta.at-rule.header.css', 'keyword.control.at-rule.css']
+
+      it 'highlights invalid @charset statements', ->
+        lines = grammar.tokenizeLines " @charset 'US-ASCII';"
+        expect(lines[0][0]).toEqual value: ' ', scopes: ['source.css', 'meta.at-rule.charset.css', 'invalid.illegal.leading-whitespace.charset.css']
+        expect(lines[0][1]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.charset.css', 'keyword.control.at-rule.charset.css', 'punctuation.definition.keyword.css']
+        expect(lines[0][2]).toEqual value: 'charset', scopes: ['source.css', 'meta.at-rule.charset.css', 'keyword.control.at-rule.charset.css']
+        expect(lines[0][4]).toEqual value: "'US-ASCII'", scopes: ['source.css', 'meta.at-rule.charset.css', 'invalid.illegal.not-double-quoted.charset.css']
+        expect(lines[0][5]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.charset.css', 'punctuation.terminator.rule.css']
+
+        lines = grammar.tokenizeLines '@charset "iso-8859-15";'
+        expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.charset.css', 'keyword.control.at-rule.charset.css', 'punctuation.definition.keyword.css']
+        expect(lines[0][1]).toEqual value: 'charset', scopes: ['source.css', 'meta.at-rule.charset.css', 'keyword.control.at-rule.charset.css']
+        expect(lines[0][2]).toEqual value: ' ', scopes: ['source.css', 'meta.at-rule.charset.css', 'invalid.illegal.whitespace.charset.css']
+        expect(lines[0][3]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.charset.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css']
+        expect(lines[0][4]).toEqual value: 'iso-8859-15', scopes: ['source.css', 'meta.at-rule.charset.css', 'string.quoted.double.css']
+        expect(lines[0][5]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.charset.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css']
+        expect(lines[0][6]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.charset.css', 'punctuation.terminator.rule.css']
+
+        lines = grammar.tokenizeLines '@charset"US-ASCII";'
+        expect(lines[0][0]).toEqual value: '@charset"US-ASCII"', scopes: ['source.css', 'meta.at-rule.charset.css', 'invalid.illegal.no-whitespace.charset.css']
+        expect(lines[0][1]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.charset.css', 'punctuation.terminator.rule.css']
+
+        lines = grammar.tokenizeLines '@charset "UTF-8" ;'
+        expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.charset.css', 'keyword.control.at-rule.charset.css', 'punctuation.definition.keyword.css']
+        expect(lines[0][1]).toEqual value: 'charset', scopes: ['source.css', 'meta.at-rule.charset.css', 'keyword.control.at-rule.charset.css']
+        expect(lines[0][2]).toEqual value: ' ', scopes: ['source.css', 'meta.at-rule.charset.css']
+        expect(lines[0][3]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.charset.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css']
+        expect(lines[0][4]).toEqual value: 'UTF-8', scopes: ['source.css', 'meta.at-rule.charset.css', 'string.quoted.double.css']
+        expect(lines[0][5]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.charset.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css']
+        expect(lines[0][6]).toEqual value: ' ', scopes: ['source.css', 'meta.at-rule.charset.css', 'invalid.illegal.unexpected-characters.charset.css']
+        expect(lines[0][7]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.charset.css', 'punctuation.terminator.rule.css']
+
+        lines = grammar.tokenizeLines '@charset "WTF-8" /* Nope */ ;'
+        expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.charset.css', 'keyword.control.at-rule.charset.css', 'punctuation.definition.keyword.css']
+        expect(lines[0][1]).toEqual value: 'charset', scopes: ['source.css', 'meta.at-rule.charset.css', 'keyword.control.at-rule.charset.css']
+        expect(lines[0][2]).toEqual value: ' ', scopes: ['source.css', 'meta.at-rule.charset.css']
+        expect(lines[0][3]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.charset.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css']
+        expect(lines[0][4]).toEqual value: 'WTF-8', scopes: ['source.css', 'meta.at-rule.charset.css', 'string.quoted.double.css']
+        expect(lines[0][5]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.charset.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css']
+        expect(lines[0][6]).toEqual value: ' /* Nope */ ', scopes: ['source.css', 'meta.at-rule.charset.css', 'invalid.illegal.unexpected-characters.charset.css']
+        expect(lines[0][7]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.charset.css', 'punctuation.terminator.rule.css']
+
+        lines = grammar.tokenizeLines '@charset "UTF-8'
+        expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.charset.css', 'keyword.control.at-rule.charset.css', 'punctuation.definition.keyword.css']
+        expect(lines[0][1]).toEqual value: 'charset', scopes: ['source.css', 'meta.at-rule.charset.css', 'keyword.control.at-rule.charset.css']
+        expect(lines[0][2]).toEqual value: ' ', scopes: ['source.css', 'meta.at-rule.charset.css']
+        expect(lines[0][3]).toEqual value: '"UTF-8', scopes: ['source.css', 'meta.at-rule.charset.css', 'invalid.illegal.unclosed-string.charset.css']
+
+        lines = grammar.tokenizeLines "@CHARSET 'US-ASCII';"
+        expect(lines[0][0]).toEqual value: '@CHARSET', scopes: ['source.css', 'meta.at-rule.charset.css', 'invalid.illegal.not-lowercase.charset.css']
+        expect(lines[0][1]).toEqual value: " 'US-ASCII'", scopes: ['source.css', 'meta.at-rule.charset.css']
+        expect(lines[0][2]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.charset.css', 'punctuation.terminator.rule.css']
+
+    describe '@import', ->
+      it 'tokenises @import statements', ->
+        {tokens} = grammar.tokenizeLine('@import url("file.css");')
+        expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css', 'punctuation.definition.keyword.css']
+        expect(tokens[1]).toEqual value: 'import', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css']
+        expect(tokens[3]).toEqual value: 'url', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'support.function.url.css']
+
expect(tokens[4]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[5]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[6]).toEqual value: 'file.css', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.double.css'] + expect(tokens[7]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[8]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[9]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.import.css', 'punctuation.terminator.rule.css'] + + {tokens} = grammar.tokenizeLine('@import "file.css";') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'import', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css'] + expect(tokens[3]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[4]).toEqual value: 'file.css', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.double.css'] + expect(tokens[5]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[6]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.import.css', 'punctuation.terminator.rule.css'] + + {tokens} = grammar.tokenizeLine("@import 'file.css';") + expect(tokens[3]).toEqual value: "'", scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.single.css', 'punctuation.definition.string.begin.css'] + expect(tokens[4]).toEqual value: 'file.css', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.single.css'] + expect(tokens[5]).toEqual value: "'", scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.single.css', 'punctuation.definition.string.end.css'] + + it "doesn't let injected comments impact parameter matching", -> + {tokens} = grammar.tokenizeLine('@import /* url("name"); */ "1.css";') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'import', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css'] + expect(tokens[3]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[4]).toEqual value: ' url("name"); ', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css'] + expect(tokens[5]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[7]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[8]).toEqual value: '1.css', scopes: ['source.css', 'meta.at-rule.import.css', 
+ expect(tokens[9]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css']
+ expect(tokens[10]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.import.css', 'punctuation.terminator.rule.css']
+
+ {tokens} = grammar.tokenizeLine('@import/* Comment */"2.css";')
+ expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css', 'punctuation.definition.keyword.css']
+ expect(tokens[1]).toEqual value: 'import', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css']
+ expect(tokens[2]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css', 'punctuation.definition.comment.begin.css']
+ expect(tokens[3]).toEqual value: ' Comment ', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css']
+ expect(tokens[4]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css', 'punctuation.definition.comment.end.css']
+ expect(tokens[5]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css']
+ expect(tokens[6]).toEqual value: '2.css', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.double.css']
+ expect(tokens[7]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css']
+ expect(tokens[8]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.import.css', 'punctuation.terminator.rule.css']
+
+ it 'correctly handles word boundaries', ->
+ {tokens} = grammar.tokenizeLine('@import"file.css";')
+ expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css', 'punctuation.definition.keyword.css']
+ expect(tokens[1]).toEqual value: 'import', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css']
+ expect(tokens[2]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css']
+ expect(tokens[3]).toEqual value: 'file.css', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.double.css']
+ expect(tokens[4]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css']
+ expect(tokens[5]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.import.css', 'punctuation.terminator.rule.css']
+
+ {tokens} = grammar.tokenizeLine('@import-file.css;')
+ expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.header.css', 'keyword.control.at-rule.css', 'punctuation.definition.keyword.css']
+ expect(tokens[1]).toEqual value: 'import-file', scopes: ['source.css', 'meta.at-rule.header.css', 'keyword.control.at-rule.css']
+ expect(tokens[2]).toEqual value: '.css', scopes: ['source.css', 'meta.at-rule.header.css']
+ expect(tokens[3]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.header.css', 'punctuation.terminator.rule.css']
+
+ it 'matches a URL that starts on the next line', ->
+ lines = grammar.tokenizeLines '@import\nurl("file.css");'
+ expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css', 'punctuation.definition.keyword.css']
+ expect(lines[0][1]).toEqual value: 'import', scopes:
['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css'] + expect(lines[0][2]).toEqual value: '', scopes: ['source.css', 'meta.at-rule.import.css'] + expect(lines[1][0]).toEqual value: 'url', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'support.function.url.css'] + expect(lines[1][1]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[1][2]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(lines[1][3]).toEqual value: 'file.css', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.double.css'] + expect(lines[1][4]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(lines[1][5]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[1][6]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.import.css', 'punctuation.terminator.rule.css'] + + it 'matches comments inside query lists', -> + {tokens} = grammar.tokenizeLine('@import url("1.css") print /* url(";"); */ all;') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'import', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css'] + expect(tokens[3]).toEqual value: 'url', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'support.function.url.css'] + expect(tokens[4]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[5]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[6]).toEqual value: '1.css', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.double.css'] + expect(tokens[7]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[8]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[10]).toEqual value: 'print', scopes: ['source.css', 'meta.at-rule.import.css', 'support.constant.media.css'] + expect(tokens[12]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[13]).toEqual value: ' url(";"); ', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css'] + expect(tokens[14]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[16]).toEqual value: 'all', scopes: ['source.css', 'meta.at-rule.import.css', 'support.constant.media.css'] + expect(tokens[17]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.import.css', 
'punctuation.terminator.rule.css'] + + it 'highlights deprecated media types', -> + {tokens} = grammar.tokenizeLine('@import "astral.css" projection;') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'import', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css'] + expect(tokens[3]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[4]).toEqual value: 'astral.css', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.double.css'] + expect(tokens[5]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[7]).toEqual value: 'projection', scopes: ['source.css', 'meta.at-rule.import.css', 'invalid.deprecated.constant.media.css'] + expect(tokens[8]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.import.css', 'punctuation.terminator.rule.css'] + + it 'highlights media features in query lists', -> + {tokens} = grammar.tokenizeLine('@import url(\'landscape.css\') screen and (orientation:landscape);') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'import', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css'] + expect(tokens[3]).toEqual value: 'url', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'support.function.url.css'] + expect(tokens[4]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[5]).toEqual value: '\'', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.single.css', 'punctuation.definition.string.begin.css'] + expect(tokens[6]).toEqual value: 'landscape.css', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.single.css'] + expect(tokens[7]).toEqual value: '\'', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.single.css', 'punctuation.definition.string.end.css'] + expect(tokens[8]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[10]).toEqual value: 'screen', scopes: ['source.css', 'meta.at-rule.import.css', 'support.constant.media.css'] + expect(tokens[12]).toEqual value: 'and', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.operator.logical.and.media.css'] + expect(tokens[14]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.import.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(tokens[15]).toEqual value: 'orientation', scopes: ['source.css', 'meta.at-rule.import.css', 'support.type.property-name.media.css'] + expect(tokens[16]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.import.css', 'punctuation.separator.key-value.css'] + expect(tokens[17]).toEqual value: 'landscape', scopes: ['source.css', 'meta.at-rule.import.css', 'support.constant.property-value.css'] + expect(tokens[18]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.import.css', 
'punctuation.definition.parameters.end.bracket.round.css'] + expect(tokens[19]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.import.css', 'punctuation.terminator.rule.css'] + + describe '@media', -> + it 'tokenises @media keywords correctly', -> + {tokens} = grammar.tokenizeLine('@media(max-width: 37.5em) { }') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'media', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + expect(tokens[2]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(tokens[3]).toEqual value: 'max-width', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.type.property-name.media.css'] + expect(tokens[4]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.key-value.css'] + expect(tokens[6]).toEqual value: '37.5', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css'] + expect(tokens[7]).toEqual value: 'em', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css', 'keyword.other.unit.em.css'] + expect(tokens[8]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.end.bracket.round.css'] + expect(tokens[9]).toEqual value: ' ', scopes: ['source.css'] + expect(tokens[10]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.begin.bracket.curly.css'] + expect(tokens[12]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.end.bracket.curly.css'] + + {tokens} = grammar.tokenizeLine('@media not print and (max-width: 37.5em){ }') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'media', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + expect(tokens[3]).toEqual value: 'not', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.logical.not.media.css'] + expect(tokens[5]).toEqual value: 'print', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.constant.media.css'] + expect(tokens[7]).toEqual value: 'and', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.logical.and.media.css'] + expect(tokens[9]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(tokens[10]).toEqual value: 'max-width', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.type.property-name.media.css'] + expect(tokens[11]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.key-value.css'] + expect(tokens[13]).toEqual value: '37.5', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css'] + expect(tokens[14]).toEqual value: 'em', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css', 'keyword.other.unit.em.css'] + expect(tokens[15]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.end.bracket.round.css'] + expect(tokens[16]).toEqual 
value: '{', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.begin.bracket.curly.css'] + expect(tokens[18]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.end.bracket.curly.css'] + + it 'highlights deprecated media types', -> + {tokens} = grammar.tokenizeLine('@media (max-device-width: 2px){ }') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'media', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + expect(tokens[3]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(tokens[4]).toEqual value: 'max-device-width', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.type.property-name.media.css'] + expect(tokens[5]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.key-value.css'] + expect(tokens[7]).toEqual value: '2', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css'] + expect(tokens[8]).toEqual value: 'px', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(tokens[9]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.end.bracket.round.css'] + expect(tokens[10]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.begin.bracket.curly.css'] + expect(tokens[12]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.end.bracket.curly.css'] + + it 'highlights vendored media features', -> + {tokens} = grammar.tokenizeLine('@media (-webkit-foo: bar){ b{ } }') + expect(tokens[3]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(tokens[4]).toEqual value: '-webkit-foo', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.type.vendored.property-name.media.css'] + expect(tokens[5]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.key-value.css'] + expect(tokens[6]).toEqual value: ' bar', scopes: ['source.css', 'meta.at-rule.media.header.css'] + expect(tokens[7]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.end.bracket.round.css'] + expect(tokens[8]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.begin.bracket.curly.css'] + + {tokens} = grammar.tokenizeLine('@media screen and (-ms-high-contrast:black-on-white){ }') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'media', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + expect(tokens[3]).toEqual value: 'screen', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.constant.media.css'] + expect(tokens[5]).toEqual value: 'and', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.logical.and.media.css'] + expect(tokens[7]).toEqual value: '(', scopes: 
['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(tokens[8]).toEqual value: '-ms-high-contrast', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.type.vendored.property-name.media.css'] + expect(tokens[9]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.key-value.css'] + expect(tokens[10]).toEqual value: 'black-on-white', scopes: ['source.css', 'meta.at-rule.media.header.css'] + expect(tokens[11]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.end.bracket.round.css'] + expect(tokens[12]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.begin.bracket.curly.css'] + expect(tokens[14]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.end.bracket.curly.css'] + + {tokens} = grammar.tokenizeLine('@media (_moz-a:b){}') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'media', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + expect(tokens[3]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(tokens[4]).toEqual value: '_moz-a', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.type.vendored.property-name.media.css'] + expect(tokens[5]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.key-value.css'] + expect(tokens[6]).toEqual value: 'b', scopes: ['source.css', 'meta.at-rule.media.header.css'] + expect(tokens[7]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.end.bracket.round.css'] + + {tokens} = grammar.tokenizeLine('@media (-hp-foo:bar){}') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'media', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + expect(tokens[3]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(tokens[4]).toEqual value: '-hp-foo', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.type.vendored.property-name.media.css'] + expect(tokens[5]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.key-value.css'] + expect(tokens[6]).toEqual value: 'bar', scopes: ['source.css', 'meta.at-rule.media.header.css'] + expect(tokens[7]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.end.bracket.round.css'] + + {tokens} = grammar.tokenizeLine('@media (mso-page-size:wide){}') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'media', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + expect(tokens[3]).toEqual value: '(', scopes: ['source.css', 
'meta.at-rule.media.header.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(tokens[4]).toEqual value: 'mso-page-size', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.type.vendored.property-name.media.css'] + expect(tokens[5]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.key-value.css'] + expect(tokens[6]).toEqual value: 'wide', scopes: ['source.css', 'meta.at-rule.media.header.css'] + expect(tokens[7]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.end.bracket.round.css'] + + it 'tokenises @media immediately following a closing brace', -> + {tokens} = grammar.tokenizeLine('h1 { }@media only screen { } h2 { }') + expect(tokens[0]).toEqual value: 'h1', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[4]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(tokens[5]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css'] + expect(tokens[6]).toEqual value: 'media', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + expect(tokens[8]).toEqual value: 'only', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.logical.only.media.css'] + expect(tokens[10]).toEqual value: 'screen', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.constant.media.css'] + expect(tokens[12]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.begin.bracket.curly.css'] + expect(tokens[14]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.end.bracket.curly.css'] + expect(tokens[16]).toEqual value: 'h2', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[18]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[20]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + {tokens} = grammar.tokenizeLine('h1 { }@media only screen { }h2 { }') + expect(tokens[0]).toEqual value: 'h1', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[4]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(tokens[5]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css'] + expect(tokens[6]).toEqual value: 'media', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + expect(tokens[8]).toEqual value: 'only', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.logical.only.media.css'] + expect(tokens[10]).toEqual value: 'screen', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.constant.media.css'] + 
expect(tokens[12]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.begin.bracket.curly.css'] + expect(tokens[14]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.end.bracket.curly.css'] + expect(tokens[15]).toEqual value: 'h2', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[17]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[19]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it 'tokenises level 4 media-query syntax', -> + lines = grammar.tokenizeLines """ + @media (min-width >= 0px) + and (max-width <= 400) + and (min-height > 400) + and (max-height < 200) + """ + expect(lines[0][6]).toEqual value: '>=', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.comparison.css'] + expect(lines[1][6]).toEqual value: '<=', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.comparison.css'] + expect(lines[2][6]).toEqual value: '>', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.comparison.css'] + expect(lines[3][6]).toEqual value: '<', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.comparison.css'] + + it 'tokenises comments between media types', -> + {tokens} = grammar.tokenizeLine('@media/* */only/* */screen/* */and (min-width:1100px){}') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'media', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + expect(tokens[2]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[4]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[5]).toEqual value: 'only', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.logical.only.media.css'] + expect(tokens[6]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[8]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[9]).toEqual value: 'screen', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.constant.media.css'] + expect(tokens[10]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[12]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[13]).toEqual value: 'and', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.logical.and.media.css'] + expect(tokens[15]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(tokens[16]).toEqual value: 'min-width', scopes: ['source.css', 'meta.at-rule.media.header.css', 
'support.type.property-name.media.css'] + expect(tokens[17]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.key-value.css'] + expect(tokens[18]).toEqual value: '1100', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css'] + expect(tokens[19]).toEqual value: 'px', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(tokens[20]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.end.bracket.round.css'] + expect(tokens[21]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.begin.bracket.curly.css'] + expect(tokens[22]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.end.bracket.curly.css'] + + it 'tokenises comments between media features', -> + {tokens} = grammar.tokenizeLine('@media/*=*/(max-width:/**/37.5em)/*=*/and/*=*/(/*=*/min-height/*:*/:/*=*/1.2em/*;*/){}') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'media', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + expect(tokens[2]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[3]).toEqual value: '=', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css'] + expect(tokens[4]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[5]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(tokens[6]).toEqual value: 'max-width', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.type.property-name.media.css'] + expect(tokens[7]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.key-value.css'] + expect(tokens[8]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[9]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[10]).toEqual value: '37.5', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css'] + expect(tokens[11]).toEqual value: 'em', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css', 'keyword.other.unit.em.css'] + expect(tokens[12]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.end.bracket.round.css'] + expect(tokens[13]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[14]).toEqual value: '=', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css'] + expect(tokens[15]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[16]).toEqual value: 'and', scopes: ['source.css', 
'meta.at-rule.media.header.css', 'keyword.operator.logical.and.media.css'] + expect(tokens[17]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[18]).toEqual value: '=', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css'] + expect(tokens[19]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[20]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(tokens[21]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[22]).toEqual value: '=', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css'] + expect(tokens[23]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[24]).toEqual value: 'min-height', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.type.property-name.media.css'] + expect(tokens[25]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[26]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css'] + expect(tokens[27]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[28]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.key-value.css'] + expect(tokens[29]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[30]).toEqual value: '=', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css'] + expect(tokens[31]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[32]).toEqual value: '1.2', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css'] + expect(tokens[33]).toEqual value: 'em', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css', 'keyword.other.unit.em.css'] + expect(tokens[34]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[35]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css'] + expect(tokens[36]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[37]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.end.bracket.round.css'] + expect(tokens[38]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.begin.bracket.curly.css'] + expect(tokens[39]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.end.bracket.curly.css'] + + it 'matches media queries across lines', -> + lines = grammar.tokenizeLines """ + @media only screen 
and (min-width : /* 40 */ + 320px), + not print and (max-width: 480px) /* kek */ and (-webkit-min-device-pixel-ratio /*:*/ : 2), + only speech and (min-width: 10em), /* wat */ (-webkit-min-device-pixel-ratio: 2) { } + """ + expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css'] + expect(lines[0][1]).toEqual value: 'media', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + expect(lines[0][3]).toEqual value: 'only', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.logical.only.media.css'] + expect(lines[0][5]).toEqual value: 'screen', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.constant.media.css'] + expect(lines[0][7]).toEqual value: 'and', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.logical.and.media.css'] + expect(lines[0][9]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(lines[0][10]).toEqual value: 'min-width', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.type.property-name.media.css'] + expect(lines[0][12]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.key-value.css'] + expect(lines[0][14]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[0][15]).toEqual value: ' 40 ', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css'] + expect(lines[0][16]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[1][1]).toEqual value: '320', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css'] + expect(lines[1][2]).toEqual value: 'px', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[1][3]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.end.bracket.round.css'] + expect(lines[1][4]).toEqual value: ',', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.list.comma.css'] + expect(lines[2][1]).toEqual value: 'not', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.logical.not.media.css'] + expect(lines[2][3]).toEqual value: 'print', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.constant.media.css'] + expect(lines[2][5]).toEqual value: 'and', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.logical.and.media.css'] + expect(lines[2][7]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(lines[2][8]).toEqual value: 'max-width', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.type.property-name.media.css'] + expect(lines[2][9]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.key-value.css'] + expect(lines[2][11]).toEqual value: '480', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css'] + expect(lines[2][12]).toEqual value: 'px', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] 
+ expect(lines[2][13]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.end.bracket.round.css'] + expect(lines[2][15]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[2][16]).toEqual value: ' kek ', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css'] + expect(lines[2][17]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[2][19]).toEqual value: 'and', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.logical.and.media.css'] + expect(lines[2][21]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(lines[2][22]).toEqual value: '-webkit-min-device-pixel-ratio', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.type.vendored.property-name.media.css'] + expect(lines[2][24]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[2][25]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css'] + expect(lines[2][26]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[2][28]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.key-value.css'] + expect(lines[2][30]).toEqual value: '2', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css'] + expect(lines[2][31]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.end.bracket.round.css'] + expect(lines[2][32]).toEqual value: ',', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.list.comma.css'] + expect(lines[3][0]).toEqual value: 'only', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.logical.only.media.css'] + expect(lines[3][2]).toEqual value: 'speech', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.constant.media.css'] + expect(lines[3][4]).toEqual value: 'and', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.operator.logical.and.media.css'] + expect(lines[3][6]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(lines[3][7]).toEqual value: 'min-width', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.type.property-name.media.css'] + expect(lines[3][8]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.key-value.css'] + expect(lines[3][10]).toEqual value: '10', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css'] + expect(lines[3][11]).toEqual value: 'em', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css', 'keyword.other.unit.em.css'] + expect(lines[3][12]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.end.bracket.round.css'] + expect(lines[3][13]).toEqual value: ',', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.list.comma.css'] + 
expect(lines[3][15]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[3][16]).toEqual value: ' wat ', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css'] + expect(lines[3][17]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[3][19]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(lines[3][20]).toEqual value: '-webkit-min-device-pixel-ratio', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.type.vendored.property-name.media.css'] + expect(lines[3][21]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.key-value.css'] + expect(lines[3][23]).toEqual value: '2', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css'] + expect(lines[3][24]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.end.bracket.round.css'] + expect(lines[3][26]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.begin.bracket.curly.css'] + expect(lines[3][28]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.end.bracket.curly.css'] + + it 'highlights invalid commas', -> + {tokens} = grammar.tokenizeLine('@media , {}') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'media', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + expect(tokens[3]).toEqual value: ',', scopes: ['source.css', 'meta.at-rule.media.header.css', 'invalid.illegal.comma.css'] + expect(tokens[5]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.begin.bracket.curly.css'] + expect(tokens[6]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.end.bracket.curly.css'] + + {tokens} = grammar.tokenizeLine('@media , ,screen {}') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'media', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + expect(tokens[3]).toEqual value: ', ,', scopes: ['source.css', 'meta.at-rule.media.header.css', 'invalid.illegal.comma.css'] + expect(tokens[4]).toEqual value: 'screen', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.constant.media.css'] + expect(tokens[6]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.begin.bracket.curly.css'] + expect(tokens[7]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.media.body.css', 'punctuation.section.media.end.bracket.curly.css'] + + it 'allows spaces inside ratio values', -> + {tokens} = grammar.tokenizeLine('@media (min-aspect-ratio: 3 / 4) and (max-aspect-ratio: 20 / 17) {}') + expect(tokens[7]).toEqual value: '3', scopes: ['source.css', 'meta.at-rule.media.header.css', 'meta.ratio.css', 'constant.numeric.css'] + expect(tokens[8]).toEqual 
value: ' ', scopes: ['source.css', 'meta.at-rule.media.header.css', 'meta.ratio.css'] + expect(tokens[9]).toEqual value: '/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'meta.ratio.css', 'keyword.operator.arithmetic.css'] + expect(tokens[10]).toEqual value: ' ', scopes: ['source.css', 'meta.at-rule.media.header.css', 'meta.ratio.css'] + expect(tokens[11]).toEqual value: '4', scopes: ['source.css', 'meta.at-rule.media.header.css', 'meta.ratio.css', 'constant.numeric.css'] + expect(tokens[20]).toEqual value: '20', scopes: ['source.css', 'meta.at-rule.media.header.css', 'meta.ratio.css', 'constant.numeric.css'] + expect(tokens[21]).toEqual value: ' ', scopes: ['source.css', 'meta.at-rule.media.header.css', 'meta.ratio.css'] + expect(tokens[22]).toEqual value: '/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'meta.ratio.css', 'keyword.operator.arithmetic.css'] + expect(tokens[23]).toEqual value: ' ', scopes: ['source.css', 'meta.at-rule.media.header.css', 'meta.ratio.css'] + expect(tokens[24]).toEqual value: '17', scopes: ['source.css', 'meta.at-rule.media.header.css', 'meta.ratio.css', 'constant.numeric.css'] + + describe '@keyframes', -> + it 'tokenises keyframe lists correctly', -> + lines = grammar.tokenizeLines """ + @keyframes important1 { + from { margin-top: 50px; + margin-bottom: 100px } + 50% { margin-top: 150px !important; } /* Ignored */ + to { margin-top: 100px; } + } + """ + expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'keyword.control.at-rule.keyframes.css', 'punctuation.definition.keyword.css'] + expect(lines[0][1]).toEqual value: 'keyframes', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'keyword.control.at-rule.keyframes.css'] + expect(lines[0][3]).toEqual value: 'important1', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'variable.parameter.keyframe-list.css'] + expect(lines[0][4]).toEqual value: ' ', scopes: ['source.css'] + expect(lines[0][5]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'punctuation.section.keyframes.begin.bracket.curly.css'] + expect(lines[1][1]).toEqual value: 'from', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'entity.other.keyframe-offset.css'] + expect(lines[1][3]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[1][5]).toEqual value: 'margin-top', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[1][6]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(lines[1][8]).toEqual value: '50', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[1][9]).toEqual value: 'px', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[1][10]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[2][1]).toEqual value: 'margin-bottom', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-name.css', 
'support.type.property-name.css'] + expect(lines[2][2]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(lines[2][4]).toEqual value: '100', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[2][5]).toEqual value: 'px', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[2][7]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(lines[3][1]).toEqual value: '50%', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'entity.other.keyframe-offset.percentage.css'] + expect(lines[3][3]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[3][5]).toEqual value: 'margin-top', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[3][6]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(lines[3][8]).toEqual value: '150', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[3][9]).toEqual value: 'px', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[3][11]).toEqual value: '!important', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'keyword.other.important.css'] + expect(lines[3][12]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[3][14]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(lines[3][16]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[3][17]).toEqual value: ' Ignored ', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'comment.block.css'] + expect(lines[3][18]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[4][1]).toEqual value: 'to', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'entity.other.keyframe-offset.css'] + expect(lines[4][3]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[4][5]).toEqual value: 'margin-top', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[4][6]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + 
expect(lines[4][8]).toEqual value: '100', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[4][9]).toEqual value: 'px', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[4][10]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[4][12]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(lines[5][0]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'punctuation.section.keyframes.end.bracket.curly.css'] + + it 'matches injected comments', -> + lines = grammar.tokenizeLines """ + @keyframes/*{*/___IDENT__/*} + { Nah { margin-top: 2em; } + */{ from + """ + expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'keyword.control.at-rule.keyframes.css', 'punctuation.definition.keyword.css'] + expect(lines[0][1]).toEqual value: 'keyframes', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'keyword.control.at-rule.keyframes.css'] + expect(lines[0][2]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[0][3]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'comment.block.css'] + expect(lines[0][4]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[0][5]).toEqual value: '___IDENT__', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'variable.parameter.keyframe-list.css'] + expect(lines[0][6]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[0][7]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'comment.block.css'] + expect(lines[1][0]).toEqual value: ' { Nah { margin-top: 2em; }', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'comment.block.css'] + expect(lines[2][0]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[2][1]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'punctuation.section.keyframes.begin.bracket.curly.css'] + expect(lines[2][3]).toEqual value: 'from', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'entity.other.keyframe-offset.css'] + + it 'matches offset keywords case-insensitively', -> + {tokens} = grammar.tokenizeLine('@keyframes Give-them-both { fROm { } To {} }') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'keyword.control.at-rule.keyframes.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'keyframes', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'keyword.control.at-rule.keyframes.css'] + expect(tokens[3]).toEqual value: 'Give-them-both', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'variable.parameter.keyframe-list.css'] + expect(tokens[4]).toEqual value: ' ', scopes: 
['source.css'] + expect(tokens[5]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'punctuation.section.keyframes.begin.bracket.curly.css'] + expect(tokens[7]).toEqual value: 'fROm', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'entity.other.keyframe-offset.css'] + expect(tokens[9]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[11]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(tokens[13]).toEqual value: 'To', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'entity.other.keyframe-offset.css'] + expect(tokens[15]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[16]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(tokens[18]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'punctuation.section.keyframes.end.bracket.curly.css'] + + it 'matches percentile offsets', -> + {tokens} = grammar.tokenizeLine('@keyframes identifier { -50.2% } @keyframes ident2 { .25%}') + expect(tokens[7]).toEqual value: '-50.2%', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'entity.other.keyframe-offset.percentage.css'] + expect(tokens[18]).toEqual value: '.25%', scopes: ['source.css', 'meta.at-rule.keyframes.body.css', 'entity.other.keyframe-offset.percentage.css'] + + it 'highlights escape sequences inside identifiers', -> + {tokens} = grammar.tokenizeLine '@keyframes A\\1F602Z' + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'keyword.control.at-rule.keyframes.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'keyframes', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'keyword.control.at-rule.keyframes.css'] + expect(tokens[3]).toEqual value: 'A', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'variable.parameter.keyframe-list.css'] + expect(tokens[4]).toEqual value: '\\1F602', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'variable.parameter.keyframe-list.css', 'constant.character.escape.codepoint.css'] + expect(tokens[5]).toEqual value: 'Z', scopes: ['source.css', 'meta.at-rule.keyframes.header.css', 'variable.parameter.keyframe-list.css'] + + describe '@supports', -> + it 'tokenises feature queries', -> + {tokens} = grammar.tokenizeLine('@supports (font-size: 1em) { }') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.control.at-rule.supports.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'supports', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.control.at-rule.supports.css'] + expect(tokens[2]).toEqual value: ' ', scopes: ['source.css', 'meta.at-rule.supports.header.css'] + expect(tokens[3]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.begin.bracket.round.css'] + expect(tokens[4]).toEqual value: 'font-size', scopes: ['source.css', 'meta.at-rule.supports.header.css', 
'meta.feature-query.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[5]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.separator.key-value.css'] + expect(tokens[7]).toEqual value: '1', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(tokens[8]).toEqual value: 'em', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.em.css'] + expect(tokens[9]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.end.bracket.round.css'] + expect(tokens[10]).toEqual value: ' ', scopes: ['source.css'] + expect(tokens[11]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'punctuation.section.supports.begin.bracket.curly.css'] + expect(tokens[13]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'punctuation.section.supports.end.bracket.curly.css'] + + it 'matches logical operators', -> + lines = grammar.tokenizeLines """ + @supports not (font-size: 1em){ } + @supports (font-size: 1em) and (font-size: 1em){ } + @supports (font-size: 1em) or (font-size: 1em){ } + """ + expect(lines[0][3]).toEqual value: 'not', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.operator.logical.feature.not.css'] + expect(lines[1][11]).toEqual value: 'and', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.operator.logical.feature.and.css'] + expect(lines[2][11]).toEqual value: 'or', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.operator.logical.feature.or.css'] + + it 'matches custom variables in feature queries', -> + {tokens} = grammar.tokenizeLine('@supports (--foo: green){}') + expect(tokens[3]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.begin.bracket.round.css'] + expect(tokens[4]).toEqual value: '--foo', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'variable.css'] + expect(tokens[5]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.separator.key-value.css'] + expect(tokens[7]).toEqual value: 'green', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'support.constant.color.w3c-standard-color-name.css'] + expect(tokens[8]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.end.bracket.round.css'] + + it "doesn't mistake brackets in string literals for feature queries", -> + lines = grammar.tokenizeLines """ + @supports not ((tab-size:4) or (-moz-tab-size:4)){ + body::before{content: "Come on, Microsoft (Get it together already)…"; } + } + """ + expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.control.at-rule.supports.css', 'punctuation.definition.keyword.css'] + expect(lines[0][1]).toEqual value: 'supports', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.control.at-rule.supports.css'] + expect(lines[0][3]).toEqual value: 'not', scopes: ['source.css', 'meta.at-rule.supports.header.css', 
'keyword.operator.logical.feature.not.css'] + expect(lines[0][7]).toEqual value: 'tab-size', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.feature-query.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[0][12]).toEqual value: 'or', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'keyword.operator.logical.feature.or.css'] + expect(lines[0][15]).toEqual value: '-moz-tab-size', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.feature-query.css', 'meta.property-name.css', 'support.type.vendored.property-name.css'] + expect(lines[0][20]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'punctuation.section.supports.begin.bracket.curly.css'] + expect(lines[1][1]).toEqual value: 'body', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[1][2]).toEqual value: '::', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css', 'punctuation.definition.entity.css'] + expect(lines[1][3]).toEqual value: 'before', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css'] + expect(lines[1][4]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[1][5]).toEqual value: 'content', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[1][6]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(lines[1][8]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(lines[1][9]).toEqual value: 'Come on, Microsoft (Get it together already)…', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css'] + expect(lines[1][10]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(lines[1][11]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[1][13]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(lines[2][0]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'punctuation.section.supports.end.bracket.curly.css'] + + it 'tokenises multiple feature queries', -> + {tokens} = grammar.tokenizeLine('@supports (display:table-cell) or ((display:list-item) and (display:run-in)){') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.control.at-rule.supports.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'supports', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.control.at-rule.supports.css'] + 
expect(tokens[3]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.begin.bracket.round.css'] + expect(tokens[4]).toEqual value: 'display', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[5]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.separator.key-value.css'] + expect(tokens[6]).toEqual value: 'table-cell', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[7]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.end.bracket.round.css'] + expect(tokens[9]).toEqual value: 'or', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.operator.logical.feature.or.css'] + expect(tokens[11]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.begin.bracket.round.css'] + expect(tokens[12]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.feature-query.css', 'punctuation.definition.condition.begin.bracket.round.css'] + expect(tokens[13]).toEqual value: 'display', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.feature-query.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[14]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.feature-query.css', 'punctuation.separator.key-value.css'] + expect(tokens[15]).toEqual value: 'list-item', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.feature-query.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[16]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.feature-query.css', 'punctuation.definition.condition.end.bracket.round.css'] + expect(tokens[18]).toEqual value: 'and', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'keyword.operator.logical.feature.and.css'] + expect(tokens[20]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.feature-query.css', 'punctuation.definition.condition.begin.bracket.round.css'] + expect(tokens[21]).toEqual value: 'display', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.feature-query.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[22]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.feature-query.css', 'punctuation.separator.key-value.css'] + expect(tokens[23]).toEqual value: 'run-in', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.feature-query.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[24]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.feature-query.css', 'punctuation.definition.condition.end.bracket.round.css'] + 
expect(tokens[25]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.end.bracket.round.css'] + expect(tokens[26]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'punctuation.section.supports.begin.bracket.curly.css'] + + it 'embeds rulesets and other at-rules', -> + lines = grammar.tokenizeLines """ + @supports (animation-name: test) { + #node { + animation-name: test; + } + body > header[data-name="attr"] ~ *:not(:first-child){ + content: "😂👌" + } + @keyframes important1 { + from { + margin-top: 50px; + margin-bottom: 100px + } + 50% { margin-top: 150px !important; } /* Ignored */ + to { margin-top: 100px; } + } + } + """ + expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.control.at-rule.supports.css', 'punctuation.definition.keyword.css'] + expect(lines[0][1]).toEqual value: 'supports', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.control.at-rule.supports.css'] + expect(lines[0][3]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.begin.bracket.round.css'] + expect(lines[0][4]).toEqual value: 'animation-name', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[0][5]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.separator.key-value.css'] + expect(lines[0][7]).toEqual value: 'test', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css'] + expect(lines[0][8]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.end.bracket.round.css'] + expect(lines[0][10]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'punctuation.section.supports.begin.bracket.curly.css'] + expect(lines[1][1]).toEqual value: '#', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'entity.other.attribute-name.id.css', 'punctuation.definition.entity.css'] + expect(lines[1][2]).toEqual value: 'node', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'entity.other.attribute-name.id.css'] + expect(lines[1][4]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[2][1]).toEqual value: 'animation-name', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[2][2]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(lines[2][4]).toEqual value: 'test', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'meta.property-value.css'] + expect(lines[2][5]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[3][1]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + 
expect(lines[4][1]).toEqual value: 'body', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[4][3]).toEqual value: '>', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'keyword.operator.combinator.css'] + expect(lines[4][5]).toEqual value: 'header', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[4][6]).toEqual value: '[', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.begin.bracket.square.css'] + expect(lines[4][7]).toEqual value: 'data-name', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'meta.attribute-selector.css', 'entity.other.attribute-name.css'] + expect(lines[4][8]).toEqual value: '=', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'meta.attribute-selector.css', 'keyword.operator.pattern.css'] + expect(lines[4][9]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(lines[4][10]).toEqual value: 'attr', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css'] + expect(lines[4][11]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'meta.attribute-selector.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(lines[4][12]).toEqual value: ']', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'meta.attribute-selector.css', 'punctuation.definition.entity.end.bracket.square.css'] + expect(lines[4][14]).toEqual value: '~', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'keyword.operator.combinator.css'] + expect(lines[4][16]).toEqual value: '*', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'entity.name.tag.wildcard.css'] + expect(lines[4][17]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css'] + expect(lines[4][18]).toEqual value: 'not', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(lines[4][19]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[4][20]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css'] + expect(lines[4][21]).toEqual value: 'first-child', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(lines[4][22]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[4][23]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[5][1]).toEqual value: 'content', scopes: ['source.css', 
'meta.at-rule.supports.body.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[5][2]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(lines[5][4]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(lines[5][5]).toEqual value: '😂👌', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css'] + expect(lines[5][6]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(lines[6][1]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(lines[7][1]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.header.css', 'keyword.control.at-rule.keyframes.css', 'punctuation.definition.keyword.css'] + expect(lines[7][2]).toEqual value: 'keyframes', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.header.css', 'keyword.control.at-rule.keyframes.css'] + expect(lines[7][4]).toEqual value: 'important1', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.header.css', 'variable.parameter.keyframe-list.css'] + expect(lines[7][6]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'punctuation.section.keyframes.begin.bracket.curly.css'] + expect(lines[8][1]).toEqual value: 'from', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'entity.other.keyframe-offset.css'] + expect(lines[8][3]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[9][1]).toEqual value: 'margin-top', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[9][2]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(lines[9][4]).toEqual value: '50', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[9][5]).toEqual value: 'px', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[9][6]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[10][1]).toEqual value: 'margin-bottom', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-name.css', 
'support.type.property-name.css'] + expect(lines[10][2]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(lines[10][4]).toEqual value: '100', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[10][5]).toEqual value: 'px', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[11][1]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(lines[12][1]).toEqual value: '50%', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'entity.other.keyframe-offset.percentage.css'] + expect(lines[12][3]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[12][5]).toEqual value: 'margin-top', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[12][6]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(lines[12][8]).toEqual value: '150', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[12][9]).toEqual value: 'px', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[12][11]).toEqual value: '!important', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'keyword.other.important.css'] + expect(lines[12][12]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[12][14]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(lines[12][16]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[12][17]).toEqual value: ' Ignored ', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'comment.block.css'] + expect(lines[12][18]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[13][1]).toEqual value: 'to', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 
'entity.other.keyframe-offset.css'] + expect(lines[13][3]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[13][5]).toEqual value: 'margin-top', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[13][6]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(lines[13][8]).toEqual value: '100', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[13][9]).toEqual value: 'px', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[13][10]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[13][12]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(lines[14][1]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.at-rule.keyframes.body.css', 'punctuation.section.keyframes.end.bracket.curly.css'] + expect(lines[15][0]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'punctuation.section.supports.end.bracket.curly.css'] + + it 'matches injected comments', -> + # NB: This particular example actually isn't valid @supports + # syntax; it's just for stress-testing boundary-matching. 
+ lines = grammar.tokenizeLines """ + @supports/*===*/not/*==****************| + ==*/(display:table-cell)/*============*/ and (display: list-item)/*}*/{} + """ + expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.control.at-rule.supports.css', 'punctuation.definition.keyword.css'] + expect(lines[0][1]).toEqual value: 'supports', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.control.at-rule.supports.css'] + expect(lines[0][2]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[0][3]).toEqual value: '===', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'comment.block.css'] + expect(lines[0][4]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[0][5]).toEqual value: 'not', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.operator.logical.feature.not.css'] + expect(lines[0][6]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[0][7]).toEqual value: '==****************|', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'comment.block.css'] + expect(lines[1][0]).toEqual value: '==', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'comment.block.css'] + expect(lines[1][1]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[1][2]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.begin.bracket.round.css'] + expect(lines[1][3]).toEqual value: 'display', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[1][4]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.separator.key-value.css'] + expect(lines[1][5]).toEqual value: 'table-cell', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(lines[1][6]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.end.bracket.round.css'] + expect(lines[1][7]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[1][8]).toEqual value: '============', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'comment.block.css'] + expect(lines[1][9]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[1][11]).toEqual value: 'and', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.operator.logical.feature.and.css'] + expect(lines[1][13]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.begin.bracket.round.css'] + expect(lines[1][19]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'comment.block.css', 
'punctuation.definition.comment.begin.css'] + expect(lines[1][20]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'comment.block.css'] + expect(lines[1][21]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[1][22]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'punctuation.section.supports.begin.bracket.curly.css'] + expect(lines[1][23]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'punctuation.section.supports.end.bracket.curly.css'] + + it 'matches feature queries across multiple lines', -> + lines = grammar.tokenizeLines """ + @supports + (box-shadow: 0 0 2px rgba(0,0,0,.5) inset) or + (-moz-box-shadow: 0 0 2px black inset) or + (-webkit-box-shadow: 0 0 2px black inset) or + (-o-box-shadow: 0 0 2px black inset) + { .noticebox { } } + """ + expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.control.at-rule.supports.css', 'punctuation.definition.keyword.css'] + expect(lines[0][1]).toEqual value: 'supports', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.control.at-rule.supports.css'] + expect(lines[1][1]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.begin.bracket.round.css'] + expect(lines[1][2]).toEqual value: 'box-shadow', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[1][3]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.separator.key-value.css'] + expect(lines[1][5]).toEqual value: '0', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[1][7]).toEqual value: '0', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[1][9]).toEqual value: '2', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[1][10]).toEqual value: 'px', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[1][12]).toEqual value: 'rgba', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'meta.function.color.css', 'support.function.misc.css'] + expect(lines[1][13]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[1][14]).toEqual value: '0', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(lines[1][15]).toEqual value: ',', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(lines[1][16]).toEqual value: '0', scopes: ['source.css', 
'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(lines[1][17]).toEqual value: ',', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(lines[1][18]).toEqual value: '0', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(lines[1][19]).toEqual value: ',', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(lines[1][20]).toEqual value: '.5', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(lines[1][21]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[1][23]).toEqual value: 'inset', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(lines[1][24]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.end.bracket.round.css'] + expect(lines[1][26]).toEqual value: 'or', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.operator.logical.feature.or.css'] + expect(lines[2][1]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.begin.bracket.round.css'] + expect(lines[2][2]).toEqual value: '-moz-box-shadow', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-name.css', 'support.type.vendored.property-name.css'] + expect(lines[2][3]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.separator.key-value.css'] + expect(lines[2][5]).toEqual value: '0', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[2][7]).toEqual value: '0', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[2][9]).toEqual value: '2', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[2][10]).toEqual value: 'px', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[2][12]).toEqual value: 'black', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'support.constant.color.w3c-standard-color-name.css'] + expect(lines[2][14]).toEqual value: 'inset', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(lines[2][15]).toEqual value: ')', scopes: ['source.css', 
'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.end.bracket.round.css'] + expect(lines[2][17]).toEqual value: 'or', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.operator.logical.feature.or.css'] + expect(lines[3][1]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.begin.bracket.round.css'] + expect(lines[3][2]).toEqual value: '-webkit-box-shadow', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-name.css', 'support.type.vendored.property-name.css'] + expect(lines[3][3]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.separator.key-value.css'] + expect(lines[3][5]).toEqual value: '0', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[3][7]).toEqual value: '0', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[3][9]).toEqual value: '2', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[3][10]).toEqual value: 'px', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[3][12]).toEqual value: 'black', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'support.constant.color.w3c-standard-color-name.css'] + expect(lines[3][14]).toEqual value: 'inset', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(lines[3][15]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.end.bracket.round.css'] + expect(lines[3][17]).toEqual value: 'or', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'keyword.operator.logical.feature.or.css'] + expect(lines[4][1]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.begin.bracket.round.css'] + expect(lines[4][2]).toEqual value: '-o-box-shadow', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-name.css', 'support.type.vendored.property-name.css'] + expect(lines[4][3]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.separator.key-value.css'] + expect(lines[4][5]).toEqual value: '0', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[4][7]).toEqual value: '0', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[4][9]).toEqual value: '2', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[4][10]).toEqual value: 'px', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 
'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[4][12]).toEqual value: 'black', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'support.constant.color.w3c-standard-color-name.css'] + expect(lines[4][14]).toEqual value: 'inset', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(lines[4][15]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.supports.header.css', 'meta.feature-query.css', 'punctuation.definition.condition.end.bracket.round.css'] + expect(lines[5][0]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'punctuation.section.supports.begin.bracket.curly.css'] + expect(lines[5][2]).toEqual value: '.', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css'] + expect(lines[5][3]).toEqual value: 'noticebox', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.selector.css', 'entity.other.attribute-name.class.css'] + expect(lines[5][5]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[5][7]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(lines[5][9]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.supports.body.css', 'punctuation.section.supports.end.bracket.curly.css'] + + describe '@namespace', -> + it 'tokenises @namespace statements correctly', -> + {tokens} = grammar.tokenizeLine('@namespace "XML";') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.namespace.css', 'keyword.control.at-rule.namespace.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'namespace', scopes: ['source.css', 'meta.at-rule.namespace.css', 'keyword.control.at-rule.namespace.css'] + expect(tokens[2]).toEqual value: ' ', scopes: ['source.css', 'meta.at-rule.namespace.css'] + expect(tokens[3]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.namespace.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[4]).toEqual value: 'XML', scopes: ['source.css', 'meta.at-rule.namespace.css', 'string.quoted.double.css'] + expect(tokens[5]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.namespace.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[6]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.namespace.css', 'punctuation.terminator.rule.css'] + + {tokens} = grammar.tokenizeLine('@namespace prefix "XML" ;') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.namespace.css', 'keyword.control.at-rule.namespace.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'namespace', scopes: ['source.css', 'meta.at-rule.namespace.css', 'keyword.control.at-rule.namespace.css'] + expect(tokens[2]).toEqual value: ' ', scopes: ['source.css', 'meta.at-rule.namespace.css'] + expect(tokens[3]).toEqual value: 'prefix', scopes: ['source.css', 'meta.at-rule.namespace.css', 'entity.name.function.namespace-prefix.css'] + expect(tokens[4]).toEqual value: ' ', scopes: ['source.css', 'meta.at-rule.namespace.css'] + 
expect(tokens[5]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.namespace.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[6]).toEqual value: 'XML', scopes: ['source.css', 'meta.at-rule.namespace.css', 'string.quoted.double.css'] + expect(tokens[7]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.namespace.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[8]).toEqual value: ' ', scopes: ['source.css', 'meta.at-rule.namespace.css'] + expect(tokens[9]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.namespace.css', 'punctuation.terminator.rule.css'] + + {tokens} = grammar.tokenizeLine('@namespace url("http://a.bc/");') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.namespace.css', 'keyword.control.at-rule.namespace.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'namespace', scopes: ['source.css', 'meta.at-rule.namespace.css', 'keyword.control.at-rule.namespace.css'] + expect(tokens[2]).toEqual value: ' ', scopes: ['source.css', 'meta.at-rule.namespace.css'] + expect(tokens[3]).toEqual value: 'url', scopes: ['source.css', 'meta.at-rule.namespace.css', 'meta.function.url.css', 'support.function.url.css'] + expect(tokens[4]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.namespace.css', 'meta.function.url.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[5]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.namespace.css', 'meta.function.url.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[6]).toEqual value: 'http://a.bc/', scopes: ['source.css', 'meta.at-rule.namespace.css', 'meta.function.url.css', 'string.quoted.double.css'] + expect(tokens[7]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.namespace.css', 'meta.function.url.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[8]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.namespace.css', 'meta.function.url.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[9]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.namespace.css', 'punctuation.terminator.rule.css'] + + it "doesn't confuse a prefix of 'url' as a function", -> + {tokens} = grammar.tokenizeLine('@namespace url url("http://a.bc/");') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.namespace.css', 'keyword.control.at-rule.namespace.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'namespace', scopes: ['source.css', 'meta.at-rule.namespace.css', 'keyword.control.at-rule.namespace.css'] + expect(tokens[3]).toEqual value: 'url', scopes: ['source.css', 'meta.at-rule.namespace.css', 'entity.name.function.namespace-prefix.css'] + expect(tokens[5]).toEqual value: 'url', scopes: ['source.css', 'meta.at-rule.namespace.css', 'meta.function.url.css', 'support.function.url.css'] + expect(tokens[6]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.namespace.css', 'meta.function.url.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[7]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.namespace.css', 'meta.function.url.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[8]).toEqual value: 'http://a.bc/', scopes: ['source.css', 'meta.at-rule.namespace.css', 'meta.function.url.css', 
'string.quoted.double.css'] + expect(tokens[9]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.namespace.css', 'meta.function.url.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[10]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.namespace.css', 'meta.function.url.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[11]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.namespace.css', 'punctuation.terminator.rule.css'] + + it 'permits injected comments between tokens', -> + {tokens} = grammar.tokenizeLine('@namespace/*=*/pre/*=*/"url"/*=*/;') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.namespace.css', 'keyword.control.at-rule.namespace.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'namespace', scopes: ['source.css', 'meta.at-rule.namespace.css', 'keyword.control.at-rule.namespace.css'] + expect(tokens[2]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.namespace.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[3]).toEqual value: '=', scopes: ['source.css', 'meta.at-rule.namespace.css', 'comment.block.css'] + expect(tokens[4]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.namespace.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[5]).toEqual value: 'pre', scopes: ['source.css', 'meta.at-rule.namespace.css', 'entity.name.function.namespace-prefix.css'] + expect(tokens[6]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.namespace.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[7]).toEqual value: '=', scopes: ['source.css', 'meta.at-rule.namespace.css', 'comment.block.css'] + expect(tokens[8]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.namespace.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[9]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.namespace.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[10]).toEqual value: 'url', scopes: ['source.css', 'meta.at-rule.namespace.css', 'string.quoted.double.css'] + expect(tokens[11]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.namespace.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[12]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.namespace.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[13]).toEqual value: '=', scopes: ['source.css', 'meta.at-rule.namespace.css', 'comment.block.css'] + expect(tokens[14]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.namespace.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[15]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.namespace.css', 'punctuation.terminator.rule.css'] + + it 'allows no spaces between "@namespace" and quoted URLs', -> + {tokens} = grammar.tokenizeLine('@namespace"XML";') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.namespace.css', 'keyword.control.at-rule.namespace.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'namespace', scopes: ['source.css', 'meta.at-rule.namespace.css', 'keyword.control.at-rule.namespace.css'] + expect(tokens[2]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.namespace.css', 'string.quoted.double.css', 
'punctuation.definition.string.begin.css'] + expect(tokens[3]).toEqual value: 'XML', scopes: ['source.css', 'meta.at-rule.namespace.css', 'string.quoted.double.css'] + expect(tokens[4]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.namespace.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[5]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.namespace.css', 'punctuation.terminator.rule.css'] + + it 'tokenises escape sequences in prefixes', -> + {tokens} = grammar.tokenizeLine('@namespace pre\\ fix "http://url/";') + expect(tokens[3]).toEqual value: 'pre', scopes: ['source.css', 'meta.at-rule.namespace.css', 'entity.name.function.namespace-prefix.css'] + expect(tokens[4]).toEqual value: '\\ ', scopes: ['source.css', 'meta.at-rule.namespace.css', 'entity.name.function.namespace-prefix.css', 'constant.character.escape.css'] + expect(tokens[5]).toEqual value: 'fix', scopes: ['source.css', 'meta.at-rule.namespace.css', 'entity.name.function.namespace-prefix.css'] + expect(tokens[7]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.namespace.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + + it 'allows arguments to span multiple lines', -> + lines = grammar.tokenizeLines """ + @namespace + prefix"XML"; + """ + expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.namespace.css', 'keyword.control.at-rule.namespace.css', 'punctuation.definition.keyword.css'] + expect(lines[0][1]).toEqual value: 'namespace', scopes: ['source.css', 'meta.at-rule.namespace.css', 'keyword.control.at-rule.namespace.css'] + expect(lines[1][0]).toEqual value: 'prefix', scopes: ['source.css', 'meta.at-rule.namespace.css', 'entity.name.function.namespace-prefix.css'] + expect(lines[1][1]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.namespace.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(lines[1][2]).toEqual value: 'XML', scopes: ['source.css', 'meta.at-rule.namespace.css', 'string.quoted.double.css'] + expect(lines[1][3]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.namespace.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(lines[1][4]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.namespace.css', 'punctuation.terminator.rule.css'] + + lines = grammar.tokenizeLines """ + @namespace + + prefix + + url("http://a.bc/"); + """ + expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.namespace.css', 'keyword.control.at-rule.namespace.css', 'punctuation.definition.keyword.css'] + expect(lines[0][1]).toEqual value: 'namespace', scopes: ['source.css', 'meta.at-rule.namespace.css', 'keyword.control.at-rule.namespace.css'] + expect(lines[2][1]).toEqual value: 'prefix', scopes: ['source.css', 'meta.at-rule.namespace.css', 'entity.name.function.namespace-prefix.css'] + expect(lines[4][0]).toEqual value: 'url', scopes: ['source.css', 'meta.at-rule.namespace.css', 'meta.function.url.css', 'support.function.url.css'] + expect(lines[4][1]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.namespace.css', 'meta.function.url.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[4][2]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.namespace.css', 'meta.function.url.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(lines[4][3]).toEqual value: 'http://a.bc/', scopes: ['source.css', 'meta.at-rule.namespace.css', 
'meta.function.url.css', 'string.quoted.double.css'] + expect(lines[4][4]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.namespace.css', 'meta.function.url.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(lines[4][5]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.namespace.css', 'meta.function.url.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[4][6]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.namespace.css', 'punctuation.terminator.rule.css'] + + describe 'font-feature declarations', -> + it 'tokenises font-feature blocks', -> + {tokens} = grammar.tokenizeLine('@font-feature-values Font name 2 { }') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.font-features.css', 'keyword.control.at-rule.font-feature-values.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'font-feature-values', scopes: ['source.css', 'meta.at-rule.font-features.css', 'keyword.control.at-rule.font-feature-values.css'] + expect(tokens[2]).toEqual value: ' ', scopes: ['source.css', 'meta.at-rule.font-features.css'] + expect(tokens[3]).toEqual value: 'Font name 2', scopes: ['source.css', 'meta.at-rule.font-features.css', 'variable.parameter.font-name.css'] + expect(tokens[4]).toEqual value: ' ', scopes: ['source.css'] + expect(tokens[5]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[7]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it 'allows font-feature names to start on a different line', -> + lines = grammar.tokenizeLines """ + @font-feature-values + Font name 2 + { + """ + expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.font-features.css', 'keyword.control.at-rule.font-feature-values.css', 'punctuation.definition.keyword.css'] + expect(lines[0][1]).toEqual value: 'font-feature-values', scopes: ['source.css', 'meta.at-rule.font-features.css', 'keyword.control.at-rule.font-feature-values.css'] + expect(lines[1][0]).toEqual value: 'Font name 2', scopes: ['source.css', 'meta.at-rule.font-features.css', 'variable.parameter.font-name.css'] + expect(lines[2][0]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + + it 'matches injected comments', -> + {tokens} = grammar.tokenizeLine('@font-feature-values/*{*/Font/*}*/name/*{*/2{') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.font-features.css', 'keyword.control.at-rule.font-feature-values.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'font-feature-values', scopes: ['source.css', 'meta.at-rule.font-features.css', 'keyword.control.at-rule.font-feature-values.css'] + expect(tokens[2]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.font-features.css', 'variable.parameter.font-name.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[3]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.font-features.css', 'variable.parameter.font-name.css', 'comment.block.css'] + expect(tokens[4]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.font-features.css', 'variable.parameter.font-name.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[5]).toEqual value: 'Font', scopes: ['source.css', 
'meta.at-rule.font-features.css', 'variable.parameter.font-name.css'] + expect(tokens[6]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.font-features.css', 'variable.parameter.font-name.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[7]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.font-features.css', 'variable.parameter.font-name.css', 'comment.block.css'] + expect(tokens[8]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.font-features.css', 'variable.parameter.font-name.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[9]).toEqual value: 'name', scopes: ['source.css', 'meta.at-rule.font-features.css', 'variable.parameter.font-name.css'] + expect(tokens[10]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.font-features.css', 'variable.parameter.font-name.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[11]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.font-features.css', 'variable.parameter.font-name.css', 'comment.block.css'] + expect(tokens[12]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.font-features.css', 'variable.parameter.font-name.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[13]).toEqual value: '2', scopes: ['source.css', 'meta.at-rule.font-features.css', 'variable.parameter.font-name.css'] + expect(tokens[14]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + + it 'tokenises at-rules for feature names', -> + lines = grammar.tokenizeLines """ + @swash{ swashy: 2; } + @ornaments{ ident: 2; } + @annotation{ ident: 1; } + @stylistic{ stylish: 2; } + @styleset{ sets: 2 3 4; } + @character-variant{ charvar: 2 } + """ + expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.swash.css', 'keyword.control.at-rule.swash.css', 'punctuation.definition.keyword.css'] + expect(lines[0][1]).toEqual value: 'swash', scopes: ['source.css', 'meta.at-rule.swash.css', 'keyword.control.at-rule.swash.css'] + expect(lines[0][2]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[0][4]).toEqual value: 'swashy', scopes: ['source.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'variable.font-feature.css'] + expect(lines[0][5]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'punctuation.separator.key-value.css'] + expect(lines[0][7]).toEqual value: '2', scopes: ['source.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[0][8]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'punctuation.terminator.rule.css'] + expect(lines[0][10]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(lines[1][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.ornaments.css', 'keyword.control.at-rule.ornaments.css', 'punctuation.definition.keyword.css'] + expect(lines[1][1]).toEqual value: 'ornaments', scopes: ['source.css', 'meta.at-rule.ornaments.css', 'keyword.control.at-rule.ornaments.css'] + 
expect(lines[1][2]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.ornaments.css', 'meta.property-list.font-feature.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[1][4]).toEqual value: 'ident', scopes: ['source.css', 'meta.at-rule.ornaments.css', 'meta.property-list.font-feature.css', 'variable.font-feature.css'] + expect(lines[1][5]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.ornaments.css', 'meta.property-list.font-feature.css', 'punctuation.separator.key-value.css'] + expect(lines[1][7]).toEqual value: '2', scopes: ['source.css', 'meta.at-rule.ornaments.css', 'meta.property-list.font-feature.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[1][8]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.ornaments.css', 'meta.property-list.font-feature.css', 'punctuation.terminator.rule.css'] + expect(lines[1][10]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.ornaments.css', 'meta.property-list.font-feature.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(lines[2][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.annotation.css', 'keyword.control.at-rule.annotation.css', 'punctuation.definition.keyword.css'] + expect(lines[2][1]).toEqual value: 'annotation', scopes: ['source.css', 'meta.at-rule.annotation.css', 'keyword.control.at-rule.annotation.css'] + expect(lines[2][2]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.annotation.css', 'meta.property-list.font-feature.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[2][4]).toEqual value: 'ident', scopes: ['source.css', 'meta.at-rule.annotation.css', 'meta.property-list.font-feature.css', 'variable.font-feature.css'] + expect(lines[2][5]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.annotation.css', 'meta.property-list.font-feature.css', 'punctuation.separator.key-value.css'] + expect(lines[2][7]).toEqual value: '1', scopes: ['source.css', 'meta.at-rule.annotation.css', 'meta.property-list.font-feature.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[2][8]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.annotation.css', 'meta.property-list.font-feature.css', 'punctuation.terminator.rule.css'] + expect(lines[2][10]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.annotation.css', 'meta.property-list.font-feature.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(lines[3][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.stylistic.css', 'keyword.control.at-rule.stylistic.css', 'punctuation.definition.keyword.css'] + expect(lines[3][1]).toEqual value: 'stylistic', scopes: ['source.css', 'meta.at-rule.stylistic.css', 'keyword.control.at-rule.stylistic.css'] + expect(lines[3][2]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.stylistic.css', 'meta.property-list.font-feature.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[3][4]).toEqual value: 'stylish', scopes: ['source.css', 'meta.at-rule.stylistic.css', 'meta.property-list.font-feature.css', 'variable.font-feature.css'] + expect(lines[3][5]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.stylistic.css', 'meta.property-list.font-feature.css', 'punctuation.separator.key-value.css'] + expect(lines[3][7]).toEqual value: '2', scopes: ['source.css', 'meta.at-rule.stylistic.css', 'meta.property-list.font-feature.css', 'meta.property-value.css', 'constant.numeric.css'] + 
expect(lines[3][8]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.stylistic.css', 'meta.property-list.font-feature.css', 'punctuation.terminator.rule.css'] + expect(lines[3][10]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.stylistic.css', 'meta.property-list.font-feature.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(lines[4][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.styleset.css', 'keyword.control.at-rule.styleset.css', 'punctuation.definition.keyword.css'] + expect(lines[4][1]).toEqual value: 'styleset', scopes: ['source.css', 'meta.at-rule.styleset.css', 'keyword.control.at-rule.styleset.css'] + expect(lines[4][2]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.styleset.css', 'meta.property-list.font-feature.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[4][4]).toEqual value: 'sets', scopes: ['source.css', 'meta.at-rule.styleset.css', 'meta.property-list.font-feature.css', 'variable.font-feature.css'] + expect(lines[4][5]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.styleset.css', 'meta.property-list.font-feature.css', 'punctuation.separator.key-value.css'] + expect(lines[4][7]).toEqual value: '2', scopes: ['source.css', 'meta.at-rule.styleset.css', 'meta.property-list.font-feature.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[4][9]).toEqual value: '3', scopes: ['source.css', 'meta.at-rule.styleset.css', 'meta.property-list.font-feature.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[4][11]).toEqual value: '4', scopes: ['source.css', 'meta.at-rule.styleset.css', 'meta.property-list.font-feature.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[4][12]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.styleset.css', 'meta.property-list.font-feature.css', 'punctuation.terminator.rule.css'] + expect(lines[4][14]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.styleset.css', 'meta.property-list.font-feature.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(lines[5][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.character-variant.css', 'keyword.control.at-rule.character-variant.css', 'punctuation.definition.keyword.css'] + expect(lines[5][1]).toEqual value: 'character-variant', scopes: ['source.css', 'meta.at-rule.character-variant.css', 'keyword.control.at-rule.character-variant.css'] + expect(lines[5][2]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.character-variant.css', 'meta.property-list.font-feature.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[5][4]).toEqual value: 'charvar', scopes: ['source.css', 'meta.at-rule.character-variant.css', 'meta.property-list.font-feature.css', 'variable.font-feature.css'] + expect(lines[5][5]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.character-variant.css', 'meta.property-list.font-feature.css', 'punctuation.separator.key-value.css'] + expect(lines[5][7]).toEqual value: '2', scopes: ['source.css', 'meta.at-rule.character-variant.css', 'meta.property-list.font-feature.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[5][9]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.character-variant.css', 'meta.property-list.font-feature.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it 'matches feature-name rules case-insensitively', -> + lines = grammar.tokenizeLines """ + @sWASH{ swashy: 2; } + 
@ornaMENts{ ident: 2; } + @anNOTatION{ ident: 1; } + @styLISTic{ stylish: 2; } + @STYLEset{ sets: 2 3 4; } + @CHARacter-VARiant{ charvar: 2 } + """ + expect(lines[0][1]).toEqual value: 'sWASH', scopes: ['source.css', 'meta.at-rule.swash.css', 'keyword.control.at-rule.swash.css'] + expect(lines[1][1]).toEqual value: 'ornaMENts', scopes: ['source.css', 'meta.at-rule.ornaments.css', 'keyword.control.at-rule.ornaments.css'] + expect(lines[2][1]).toEqual value: 'anNOTatION', scopes: ['source.css', 'meta.at-rule.annotation.css', 'keyword.control.at-rule.annotation.css'] + expect(lines[3][1]).toEqual value: 'styLISTic', scopes: ['source.css', 'meta.at-rule.stylistic.css', 'keyword.control.at-rule.stylistic.css'] + expect(lines[4][1]).toEqual value: 'STYLEset', scopes: ['source.css', 'meta.at-rule.styleset.css', 'keyword.control.at-rule.styleset.css'] + expect(lines[5][1]).toEqual value: 'CHARacter-VARiant', scopes: ['source.css', 'meta.at-rule.character-variant.css', 'keyword.control.at-rule.character-variant.css'] + + it 'matches comments inside feature-name rules', -> + lines = grammar.tokenizeLines """ + @font-feature-values Font name 2 { + @swash{/* + ========*/swashy:/**/2;/**/} + } + """ + expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.font-features.css', 'keyword.control.at-rule.font-feature-values.css', 'punctuation.definition.keyword.css'] + expect(lines[0][1]).toEqual value: 'font-feature-values', scopes: ['source.css', 'meta.at-rule.font-features.css', 'keyword.control.at-rule.font-feature-values.css'] + expect(lines[0][3]).toEqual value: 'Font name 2', scopes: ['source.css', 'meta.at-rule.font-features.css', 'variable.parameter.font-name.css'] + expect(lines[0][5]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[1][0]).toEqual value: '@', scopes: ['source.css', 'meta.property-list.css', 'meta.at-rule.swash.css', 'keyword.control.at-rule.swash.css', 'punctuation.definition.keyword.css'] + expect(lines[1][1]).toEqual value: 'swash', scopes: ['source.css', 'meta.property-list.css', 'meta.at-rule.swash.css', 'keyword.control.at-rule.swash.css'] + expect(lines[1][2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[1][3]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[2][0]).toEqual value: '========', scopes: ['source.css', 'meta.property-list.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'comment.block.css'] + expect(lines[2][1]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[2][2]).toEqual value: 'swashy', scopes: ['source.css', 'meta.property-list.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'variable.font-feature.css'] + expect(lines[2][3]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'punctuation.separator.key-value.css'] + expect(lines[2][4]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.at-rule.swash.css', 
'meta.property-list.font-feature.css', 'meta.property-value.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[2][5]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'meta.property-value.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[2][6]).toEqual value: '2', scopes: ['source.css', 'meta.property-list.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[2][7]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'punctuation.terminator.rule.css'] + expect(lines[2][8]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[2][9]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[2][10]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(lines[3][0]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it 'highlights escape sequences inside feature-names', -> + {tokens} = grammar.tokenizeLine('@swash{ s\\000077a\\73hy: 1; }') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.swash.css', 'keyword.control.at-rule.swash.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'swash', scopes: ['source.css', 'meta.at-rule.swash.css', 'keyword.control.at-rule.swash.css'] + expect(tokens[4]).toEqual value: 's', scopes: ['source.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'variable.font-feature.css'] + expect(tokens[5]).toEqual value: '\\000077', scopes: ['source.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'variable.font-feature.css', 'constant.character.escape.codepoint.css'] + expect(tokens[6]).toEqual value: 'a', scopes: ['source.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'variable.font-feature.css'] + expect(tokens[7]).toEqual value: '\\73', scopes: ['source.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'variable.font-feature.css', 'constant.character.escape.codepoint.css'] + expect(tokens[8]).toEqual value: 'hy', scopes: ['source.css', 'meta.at-rule.swash.css', 'meta.property-list.font-feature.css', 'variable.font-feature.css'] + + describe '@page', -> + it 'tokenises @page blocks correctly', -> + {tokens} = grammar.tokenizeLine('@page :first { }') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.page.css', 'keyword.control.at-rule.page.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'page', scopes: ['source.css', 'meta.at-rule.page.css', 'keyword.control.at-rule.page.css'] + expect(tokens[2]).toEqual value: ' ', scopes: ['source.css'] + expect(tokens[3]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css'] + 
expect(tokens[4]).toEqual value: 'first', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(tokens[5]).toEqual value: ' ', scopes: ['source.css'] + expect(tokens[6]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[8]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + {tokens} = grammar.tokenizeLine('@page:right{}') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.page.css', 'keyword.control.at-rule.page.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'page', scopes: ['source.css', 'meta.at-rule.page.css', 'keyword.control.at-rule.page.css'] + expect(tokens[2]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css'] + expect(tokens[3]).toEqual value: 'right', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(tokens[4]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[5]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + {tokens} = grammar.tokenizeLine('@page{}') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.page.css', 'keyword.control.at-rule.page.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'page', scopes: ['source.css', 'meta.at-rule.page.css', 'keyword.control.at-rule.page.css'] + expect(tokens[2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[3]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + describe '@counter-style', -> + it 'tokenises them and their contents correctly', -> + lines = grammar.tokenizeLines """ + @counter-style winners-list { + system: fixed; + symbols: url(gold-medal.svg) url(silver-medal.svg) url(bronze-medal.svg); + suffix: " "; + } + """ + expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'keyword.control.at-rule.counter-style.css', 'punctuation.definition.keyword.css'] + expect(lines[0][1]).toEqual value: 'counter-style', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'keyword.control.at-rule.counter-style.css'] + expect(lines[0][3]).toEqual value: 'winners-list', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'variable.parameter.style-name.css'] + expect(lines[0][5]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[1][1]).toEqual value: 'system', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[1][2]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'punctuation.separator.key-value.css'] + expect(lines[1][4]).toEqual value: 'fixed', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(lines[1][5]).toEqual value: ';', 
scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'punctuation.terminator.rule.css'] + expect(lines[2][1]).toEqual value: 'symbols', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[2][2]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'punctuation.separator.key-value.css'] + expect(lines[2][4]).toEqual value: 'url', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-value.css', 'meta.function.url.css', 'support.function.url.css'] + expect(lines[2][5]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-value.css', 'meta.function.url.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[2][6]).toEqual value: 'gold-medal.svg', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-value.css', 'meta.function.url.css', 'variable.parameter.url.css'] + expect(lines[2][7]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-value.css', 'meta.function.url.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[2][9]).toEqual value: 'url', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-value.css', 'meta.function.url.css', 'support.function.url.css'] + expect(lines[2][10]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-value.css', 'meta.function.url.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[2][11]).toEqual value: 'silver-medal.svg', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-value.css', 'meta.function.url.css', 'variable.parameter.url.css'] + expect(lines[2][12]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-value.css', 'meta.function.url.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[2][14]).toEqual value: 'url', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-value.css', 'meta.function.url.css', 'support.function.url.css'] + expect(lines[2][15]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-value.css', 'meta.function.url.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[2][16]).toEqual value: 'bronze-medal.svg', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-value.css', 'meta.function.url.css', 'variable.parameter.url.css'] + expect(lines[2][17]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-value.css', 'meta.function.url.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[2][18]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'punctuation.terminator.rule.css'] + expect(lines[3][1]).toEqual value: 'suffix', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[3][2]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'punctuation.separator.key-value.css'] + expect(lines[3][4]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-value.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(lines[3][6]).toEqual value: '"', scopes: 
['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-value.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(lines[3][7]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'punctuation.terminator.rule.css'] + expect(lines[4][0]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it 'matches injected comments', -> + {tokens} = grammar.tokenizeLine('@counter-style/*{*/winners-list/*}*/{ system: fixed; }') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'keyword.control.at-rule.counter-style.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'counter-style', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'keyword.control.at-rule.counter-style.css'] + expect(tokens[2]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[3]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'comment.block.css'] + expect(tokens[4]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[5]).toEqual value: 'winners-list', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'variable.parameter.style-name.css'] + expect(tokens[6]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[7]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'comment.block.css'] + expect(tokens[8]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[9]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[11]).toEqual value: 'system', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[12]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'punctuation.separator.key-value.css'] + expect(tokens[14]).toEqual value: 'fixed', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[15]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'punctuation.terminator.rule.css'] + expect(tokens[17]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.counter-style.body.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it "allows the counter-style's name to start on a different line", -> + lines = grammar.tokenizeLines """ + @counter-style + winners-list + """ + expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'keyword.control.at-rule.counter-style.css', 'punctuation.definition.keyword.css'] + expect(lines[0][1]).toEqual value: 'counter-style', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'keyword.control.at-rule.counter-style.css'] + expect(lines[1][0]).toEqual value: 'winners-list', scopes: ['source.css', 
'meta.at-rule.counter-style.header.css', 'variable.parameter.style-name.css'] + + it "highlights escape sequences inside the style's name", -> + {tokens} = grammar.tokenizeLine '@counter-style A\\01F602z' + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'keyword.control.at-rule.counter-style.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'counter-style', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'keyword.control.at-rule.counter-style.css'] + expect(tokens[3]).toEqual value: 'A', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'variable.parameter.style-name.css'] + expect(tokens[4]).toEqual value: '\\01F602', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'variable.parameter.style-name.css', 'constant.character.escape.codepoint.css'] + expect(tokens[5]).toEqual value: 'z', scopes: ['source.css', 'meta.at-rule.counter-style.header.css', 'variable.parameter.style-name.css'] + + describe '@document', -> + it 'correctly tokenises @document rules', -> + lines = grammar.tokenizeLines """ + @document url(http://www.w3.org/), + url-prefix(http://www.w3.org/Style/), /* Comment */ + domain(/**/mozilla.org), + regexp("https:.*") { + body{ color: #f00; } + } + """ + expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.document.header.css', 'keyword.control.at-rule.document.css', 'punctuation.definition.keyword.css'] + expect(lines[0][1]).toEqual value: 'document', scopes: ['source.css', 'meta.at-rule.document.header.css', 'keyword.control.at-rule.document.css'] + expect(lines[0][3]).toEqual value: 'url', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.url.css', 'support.function.url.css'] + expect(lines[0][4]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.url.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[0][5]).toEqual value: 'http://www.w3.org/', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.url.css', 'variable.parameter.url.css'] + expect(lines[0][6]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.url.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[0][7]).toEqual value: ',', scopes: ['source.css', 'meta.at-rule.document.header.css', 'punctuation.separator.list.comma.css'] + expect(lines[1][1]).toEqual value: 'url-prefix', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.document-rule.css', 'support.function.document-rule.css'] + expect(lines[1][2]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.document-rule.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[1][3]).toEqual value: 'http://www.w3.org/Style/', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.document-rule.css', 'variable.parameter.document-rule.css'] + expect(lines[1][4]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.document-rule.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[1][5]).toEqual value: ',', scopes: ['source.css', 'meta.at-rule.document.header.css', 'punctuation.separator.list.comma.css'] + expect(lines[1][7]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.document.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + 
expect(lines[1][8]).toEqual value: ' Comment ', scopes: ['source.css', 'meta.at-rule.document.header.css', 'comment.block.css'] + expect(lines[1][9]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.document.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[2][1]).toEqual value: 'domain', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.document-rule.css', 'support.function.document-rule.css'] + expect(lines[2][2]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.document-rule.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[2][3]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.document-rule.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[2][4]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.document-rule.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[2][5]).toEqual value: 'mozilla.org', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.document-rule.css', 'variable.parameter.document-rule.css'] + expect(lines[2][6]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.document-rule.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[2][7]).toEqual value: ',', scopes: ['source.css', 'meta.at-rule.document.header.css', 'punctuation.separator.list.comma.css'] + expect(lines[3][1]).toEqual value: 'regexp', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.document-rule.css', 'support.function.document-rule.css'] + expect(lines[3][2]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.document-rule.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[3][3]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.document-rule.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(lines[3][4]).toEqual value: 'https:.*', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.document-rule.css', 'string.quoted.double.css'] + expect(lines[3][5]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.document-rule.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(lines[3][6]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.document.header.css', 'meta.function.document-rule.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[3][8]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.document.body.css', 'punctuation.section.document.begin.bracket.curly.css'] + expect(lines[4][1]).toEqual value: 'body', scopes: ['source.css', 'meta.at-rule.document.body.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[4][2]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.document.body.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[4][4]).toEqual value: 'color', scopes: ['source.css', 'meta.at-rule.document.body.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[4][5]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.document.body.css', 'meta.property-list.css', 
'punctuation.separator.key-value.css'] + expect(lines[4][7]).toEqual value: '#', scopes: ['source.css', 'meta.at-rule.document.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.color.rgb-value.hex.css', 'punctuation.definition.constant.css'] + expect(lines[4][8]).toEqual value: 'f00', scopes: ['source.css', 'meta.at-rule.document.body.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.color.rgb-value.hex.css'] + expect(lines[4][9]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.document.body.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[4][11]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.document.body.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(lines[5][1]).toEqual value: '}', scopes: ['source.css', 'meta.at-rule.document.body.css', 'punctuation.section.document.end.bracket.curly.css'] + + describe '@viewport', -> + it 'tokenises @viewport blocks correctly', -> + {tokens} = grammar.tokenizeLine('@viewport { min-width: 640px; max-width: 800px; }') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.viewport.css', 'keyword.control.at-rule.viewport.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'viewport', scopes: ['source.css', 'meta.at-rule.viewport.css', 'keyword.control.at-rule.viewport.css'] + expect(tokens[2]).toEqual value: ' ', scopes: ['source.css'] + expect(tokens[3]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[5]).toEqual value: 'min-width', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[6]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[8]).toEqual value: '640', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(tokens[9]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(tokens[10]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[12]).toEqual value: 'max-width', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[13]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[15]).toEqual value: '800', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(tokens[16]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(tokens[17]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[19]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it 'tokenises them across lines', -> + lines = grammar.tokenizeLines """ + @-O-VIEWPORT + { + zoom: 0.75; + min-zoom: 0.5; + max-zoom: 0.9; + } + """ + expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.viewport.css', 'keyword.control.at-rule.viewport.css', 
'punctuation.definition.keyword.css'] + expect(lines[0][1]).toEqual value: '-O-VIEWPORT', scopes: ['source.css', 'meta.at-rule.viewport.css', 'keyword.control.at-rule.viewport.css'] + expect(lines[1][0]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[2][1]).toEqual value: 'zoom', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[2][2]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(lines[2][4]).toEqual value: '0.75', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[2][5]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[3][1]).toEqual value: 'min-zoom', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[3][2]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(lines[3][4]).toEqual value: '0.5', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[3][5]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[4][1]).toEqual value: 'max-zoom', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[4][2]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(lines[4][4]).toEqual value: '0.9', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[4][5]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[5][0]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it 'tokenises injected comments', -> + lines = grammar.tokenizeLines """ + @-ms-viewport/*{*/{/* + ==*/orientation: landscape; + } + """ + expect(lines[0][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.viewport.css', 'keyword.control.at-rule.viewport.css', 'punctuation.definition.keyword.css'] + expect(lines[0][1]).toEqual value: '-ms-viewport', scopes: ['source.css', 'meta.at-rule.viewport.css', 'keyword.control.at-rule.viewport.css'] + expect(lines[0][2]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.viewport.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[0][3]).toEqual value: '{', scopes: ['source.css', 'meta.at-rule.viewport.css', 'comment.block.css'] + expect(lines[0][4]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.viewport.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[0][5]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[0][6]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[1][0]).toEqual value: '==', scopes: ['source.css', 'meta.property-list.css', 'comment.block.css'] + expect(lines[1][1]).toEqual value: '*/', scopes: ['source.css', 
'meta.property-list.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[1][2]).toEqual value: 'orientation', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[1][3]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(lines[1][5]).toEqual value: 'landscape', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(lines[1][6]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[2][0]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + describe 'unknown at-rules', -> + it 'correctly parses single-line unknown at-rules closing with semicolons', -> + lines = grammar.tokenizeLines """ + @foo; + @foo ; + @foo a; + @foo (); + @foo (a); + """ + expect(lines[0][1]).toEqual value: 'foo', scopes: ['source.css', 'meta.at-rule.header.css', 'keyword.control.at-rule.css'] + + expect(lines[1][1]).toEqual value: 'foo', scopes: ['source.css', 'meta.at-rule.header.css', 'keyword.control.at-rule.css'] + + expect(lines[2][1]).toEqual value: 'foo', scopes: ['source.css', 'meta.at-rule.header.css', 'keyword.control.at-rule.css'] + expect(lines[2][2]).toEqual value: ' a', scopes: ['source.css', 'meta.at-rule.header.css'] + + expect(lines[3][1]).toEqual value: 'foo', scopes: ['source.css', 'meta.at-rule.header.css', 'keyword.control.at-rule.css'] + expect(lines[3][2]).toEqual value: ' ()', scopes: ['source.css', 'meta.at-rule.header.css'] + + expect(lines[4][1]).toEqual value: 'foo', scopes: ['source.css', 'meta.at-rule.header.css', 'keyword.control.at-rule.css'] + expect(lines[4][2]).toEqual value: ' (a)', scopes: ['source.css', 'meta.at-rule.header.css'] + + it 'correctly parses single-line unknown at-rules closing with ;', -> + lines = grammar.tokenizeLines """ + @foo bar; + .foo + """ + expect(lines[0][1]).toEqual value: 'foo', scopes: ['source.css', 'meta.at-rule.header.css', 'keyword.control.at-rule.css'] + + expect(lines[1][0]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css'] + expect(lines[1][1]).toEqual value: 'foo', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css'] + + describe 'capitalisation', -> + it 'ignores case in at-rules', -> + lines = grammar.tokenizeLines """ + @IMPoRT url("file.css"); + @MEdIA (MAX-WIDTH: 2px){ } + @pAgE :fIRST { } + @NAMEspace "A"; + @foNT-FacE {} + """ + expect(lines[0][1]).toEqual value: 'IMPoRT', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css'] + expect(lines[1][1]).toEqual value: 'MEdIA', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + expect(lines[1][4]).toEqual value: 'MAX-WIDTH', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.type.property-name.media.css'] + expect(lines[2][1]).toEqual value: 'pAgE', scopes: ['source.css', 'meta.at-rule.page.css', 'keyword.control.at-rule.page.css'] + expect(lines[2][4]).toEqual value: 'fIRST', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(lines[3][1]).toEqual value: 'NAMEspace', scopes: ['source.css', 'meta.at-rule.namespace.css', 'keyword.control.at-rule.namespace.css'] + 
expect(lines[4][1]).toEqual value: 'foNT-FacE', scopes: ['source.css', 'meta.at-rule.font-face.css', 'keyword.control.at-rule.font-face.css'] + + it 'ignores case in property names', -> + lines = grammar.tokenizeLines """ + a{ COLOR: #fff; } + a{ gRId-tEMPLaTe: none; } + a{ bACkgrOUND-iMAGE: none; } + a{ -MOZ-IMAGE: none; } + """ + expect(lines[0][3]).toEqual value: 'COLOR', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[1][3]).toEqual value: 'gRId-tEMPLaTe', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[2][3]).toEqual value: 'bACkgrOUND-iMAGE', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[3][3]).toEqual value: '-MOZ-IMAGE', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.vendored.property-name.css'] + + it 'ignores case in property keywords', -> + lines = grammar.tokenizeLines """ + a{ color: INItIaL; } + a{ color: trAnsPAREnT; } + a{ color: rED; } + a{ color: unSET; } + a{ color: NONe; } + a{ style: lOWER-lATIN; } + a{ color: -WebkIT-foo; } + a{ font: HelVETica; } + """ + expect(lines[0][6]).toEqual value: 'INItIaL', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(lines[1][6]).toEqual value: 'trAnsPAREnT', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(lines[2][6]).toEqual value: 'rED', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.color.w3c-standard-color-name.css'] + expect(lines[3][6]).toEqual value: 'unSET', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(lines[4][6]).toEqual value: 'NONe', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(lines[5][6]).toEqual value: 'lOWER-lATIN', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.list-style-type.css'] + expect(lines[6][6]).toEqual value: '-WebkIT-foo', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.vendored.property-value.css'] + expect(lines[7][6]).toEqual value: 'HelVETica', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.font-name.css'] + + it 'ignores case in selectors', -> + lines = grammar.tokenizeLines """ + DIV:HOVER { } + #id::BefORE { } + #id::aFTEr { } + TABle:nTH-cHILD(2N+1) {} + htML:NOT(.htiml) {} + I::BACKDROP + I::-mOZ-thing {} + """ + expect(lines[0][0]).toEqual value: 'DIV', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[0][2]).toEqual value: 'HOVER', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(lines[1][3]).toEqual value: 'BefORE', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css'] + expect(lines[2][3]).toEqual value: 'aFTEr', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css'] + expect(lines[3][0]).toEqual value: 'TABle', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[3][2]).toEqual value: 'nTH-cHILD', scopes: ['source.css', 
'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(lines[3][4]).toEqual value: '2N+1', scopes: ['source.css', 'meta.selector.css', 'constant.numeric.css'] + expect(lines[4][0]).toEqual value: 'htML', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[4][2]).toEqual value: 'NOT', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(lines[5][0]).toEqual value: 'I', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[5][2]).toEqual value: 'BACKDROP', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css'] + expect(lines[6][2]).toEqual value: '-mOZ-thing', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css'] + + it 'ignores case in function names', -> + lines = grammar.tokenizeLines """ + a{ color: RGBa(); } + a{ color: hslA(); } + a{ color: URL(); } + a{ content: ATTr(); } + a{ content: CoUNTer(); } + a{ content: cuBIC-beZIER()} + a{ content: sTePs()} + a{ content: cALc(2 + 2)} + """ + expect(lines[0][6]).toEqual value: 'RGBa', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'support.function.misc.css'] + expect(lines[1][6]).toEqual value: 'hslA', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'support.function.misc.css'] + expect(lines[2][6]).toEqual value: 'URL', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.url.css', 'support.function.url.css'] + expect(lines[3][6]).toEqual value: 'ATTr', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'support.function.misc.css'] + expect(lines[4][6]).toEqual value: 'CoUNTer', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'support.function.misc.css'] + expect(lines[5][6]).toEqual value: 'cuBIC-beZIER', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'support.function.timing-function.css'] + expect(lines[6][6]).toEqual value: 'sTePs', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'support.function.timing-function.css'] + expect(lines[7][6]).toEqual value: 'cALc', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'support.function.calc.css'] + + it 'ignores case in unit names', -> + lines = grammar.tokenizeLines """ + a{width: 20EM; } + a{width: 20ReM; } + a{width: 8tURN; } + a{width: 20S; } + a{width: 20CM} + a{width: 2gRAd} + """ + expect(lines[0][5]).toEqual value: '20', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[0][6]).toEqual value: 'EM', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.em.css'] + expect(lines[1][6]).toEqual value: 'ReM', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.rem.css'] + expect(lines[2][2]).toEqual value: 'width', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[2][6]).toEqual value: 'tURN', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 
'keyword.other.unit.turn.css']
+        expect(lines[3][6]).toEqual value: 'S', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.s.css']
+        expect(lines[4][5]).toEqual value: '20', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css']
+        expect(lines[4][6]).toEqual value: 'CM', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.cm.css']
+        expect(lines[5][6]).toEqual value: 'gRAd', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.grad.css']
+
+    describe 'pseudo-classes', ->
+      it 'tokenizes regular pseudo-classes', ->
+        {tokens} = grammar.tokenizeLine 'p:first-child'
+        expect(tokens[0]).toEqual value: 'p', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css']
+        expect(tokens[1]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']
+        expect(tokens[2]).toEqual value: 'first-child', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css']
+
+      it "doesn't tokenise pseudo-classes if followed by a semicolon or closed bracket", ->
+        {tokens} = grammar.tokenizeLine('p{ left:left }')
+        expect(tokens[0]).toEqual value: 'p', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css']
+        expect(tokens[1]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css']
+        expect(tokens[3]).toEqual value: 'left', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css']
+        expect(tokens[4]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css']
+        expect(tokens[5]).toEqual value: 'left', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css']
+        expect(tokens[7]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']
+
+      describe ':dir()', ->
+        it 'tokenises :dir() and its keywords', ->
+          lines = grammar.tokenizeLines """
+            a:dir(ltr ){ }
+            *:dir( rtl){ }
+          """
+          expect(lines[0][0]).toEqual value: 'a', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css']
+          expect(lines[0][1]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']
+          expect(lines[0][2]).toEqual value: 'dir', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css']
+          expect(lines[0][3]).toEqual value: '(', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.begin.bracket.round.css']
+          expect(lines[0][4]).toEqual value: 'ltr', scopes: ['source.css', 'meta.selector.css', 'support.constant.text-direction.css']
+          expect(lines[0][5]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css']
+          expect(lines[0][6]).toEqual value: ')', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.end.bracket.round.css']
+          expect(lines[1][0]).toEqual value: '*', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.wildcard.css']
+          expect(lines[1][1]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']
+          expect(lines[1][2]).toEqual value: 'dir', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css']
+          expect(lines[1][3]).toEqual value: '(', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.begin.bracket.round.css']
+          expect(lines[1][4]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css']
+          expect(lines[1][5]).toEqual value: 'rtl', scopes: ['source.css', 'meta.selector.css', 'support.constant.text-direction.css']
+          expect(lines[1][6]).toEqual value: ')', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.end.bracket.round.css']
+
+        it 'allows :dir() to include comments and newlines', ->
+          lines = grammar.tokenizeLines """
+            :DIR(/**
+            ==*/ltr/*
+            */)
+          """
+          expect(lines[0][0]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']
+          expect(lines[0][1]).toEqual value: 'DIR', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css']
+          expect(lines[0][2]).toEqual value: '(', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.begin.bracket.round.css']
+          expect(lines[0][3]).toEqual value: '/*', scopes: ['source.css', 'meta.selector.css', 'comment.block.css', 'punctuation.definition.comment.begin.css']
+          expect(lines[0][4]).toEqual value: '*', scopes: ['source.css', 'meta.selector.css', 'comment.block.css']
+          expect(lines[1][0]).toEqual value: '==', scopes: ['source.css', 'meta.selector.css', 'comment.block.css']
+          expect(lines[1][1]).toEqual value: '*/', scopes: ['source.css', 'meta.selector.css', 'comment.block.css', 'punctuation.definition.comment.end.css']
+          expect(lines[1][2]).toEqual value: 'ltr', scopes: ['source.css', 'meta.selector.css', 'support.constant.text-direction.css']
+          expect(lines[1][3]).toEqual value: '/*', scopes: ['source.css', 'meta.selector.css', 'comment.block.css', 'punctuation.definition.comment.begin.css']
+          expect(lines[2][0]).toEqual value: '*/', scopes: ['source.css', 'meta.selector.css', 'comment.block.css', 'punctuation.definition.comment.end.css']
+          expect(lines[2][1]).toEqual value: ')', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.end.bracket.round.css']
+
+      describe ':lang()', ->
+        it 'tokenizes :lang()', ->
+          {tokens} = grammar.tokenizeLine ':lang(zh-Hans-CN,es-419)'
+          expect(tokens[0]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']
+          expect(tokens[1]).toEqual value: 'lang', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css']
+          expect(tokens[2]).toEqual value: '(', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.begin.bracket.round.css']
+          expect(tokens[3]).toEqual value: 'zh-Hans-CN', scopes: ['source.css', 'meta.selector.css', 'support.constant.language-range.css']
+          expect(tokens[4]).toEqual value: ',', scopes: ['source.css', 'meta.selector.css', 'punctuation.separator.list.comma.css']
+          expect(tokens[5]).toEqual value: 'es-419', scopes: ['source.css', 'meta.selector.css', 'support.constant.language-range.css']
+          expect(tokens[6]).toEqual value: ')', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.end.bracket.round.css']
+
+        it 'does not tokenize unquoted language ranges containing asterisks', ->
+          {tokens} = grammar.tokenizeLine ':lang(zh-*-CN)'
+          expect(tokens[3]).toEqual value: 'zh-*-CN', scopes: ['source.css', 'meta.selector.css']
+
+        it 'tokenizes language ranges containing asterisks quoted as strings', ->
+          {tokens} = grammar.tokenizeLine ':lang("zh-*-CN",\'*-ab-\')'
+          expect(tokens[3]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css']
+          expect(tokens[4]).toEqual value: 'zh-*-CN', scopes: ['source.css', 'meta.selector.css', 'string.quoted.double.css', 'support.constant.language-range.css']
+          expect(tokens[5]).toEqual value: '"', scopes: ['source.css', 'meta.selector.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css']
+          expect(tokens[6]).toEqual value: ',', scopes: ['source.css', 'meta.selector.css', 'punctuation.separator.list.comma.css']
+          expect(tokens[7]).toEqual value: "'", scopes: ['source.css', 'meta.selector.css', 'string.quoted.single.css', 'punctuation.definition.string.begin.css']
+          expect(tokens[8]).toEqual value: '*-ab-', scopes: ['source.css', 'meta.selector.css', 'string.quoted.single.css', 'support.constant.language-range.css']
+          expect(tokens[9]).toEqual value: "'", scopes: ['source.css', 'meta.selector.css', 'string.quoted.single.css', 'punctuation.definition.string.end.css']
+
+      describe ':not()', ->
+        it 'tokenises other selectors inside :not()', ->
+          {tokens} = grammar.tokenizeLine('*:not(.class-name):not(div) {}')
+          expect(tokens[1]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']
+          expect(tokens[2]).toEqual value: 'not', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css']
+          expect(tokens[3]).toEqual value: '(', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.begin.bracket.round.css']
+          expect(tokens[4]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css']
+          expect(tokens[5]).toEqual value: 'class-name', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css']
+          expect(tokens[6]).toEqual value: ')', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.end.bracket.round.css']
+          expect(tokens[7]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css']
+          expect(tokens[8]).toEqual value: 'not', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css']
+          expect(tokens[9]).toEqual value: '(', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.begin.bracket.round.css']
+          expect(tokens[10]).toEqual value: 'div', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css']
+          expect(tokens[11]).toEqual value: ')', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.end.bracket.round.css']
+
+        it 'tokenises injected comments', ->
+          {tokens} = grammar.tokenizeLine('*:not(/*(*/.class-name/*)*/):not(/*b*/) {}')
+          expect(tokens[2]).toEqual value: 'not', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css']
+          expect(tokens[3]).toEqual value: '(', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.begin.bracket.round.css']
+          expect(tokens[4]).toEqual value: '/*', scopes: ['source.css', 'meta.selector.css', 'comment.block.css', 'punctuation.definition.comment.begin.css']
+
expect(tokens[5]).toEqual value: '(', scopes: ['source.css', 'meta.selector.css', 'comment.block.css'] + expect(tokens[6]).toEqual value: '*/', scopes: ['source.css', 'meta.selector.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[7]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css'] + expect(tokens[8]).toEqual value: 'class-name', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css'] + expect(tokens[9]).toEqual value: '/*', scopes: ['source.css', 'meta.selector.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[10]).toEqual value: ')', scopes: ['source.css', 'meta.selector.css', 'comment.block.css'] + expect(tokens[11]).toEqual value: '*/', scopes: ['source.css', 'meta.selector.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[12]).toEqual value: ')', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[13]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css'] + expect(tokens[14]).toEqual value: 'not', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(tokens[15]).toEqual value: '(', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[16]).toEqual value: '/*', scopes: ['source.css', 'meta.selector.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[17]).toEqual value: 'b', scopes: ['source.css', 'meta.selector.css', 'comment.block.css'] + expect(tokens[18]).toEqual value: '*/', scopes: ['source.css', 'meta.selector.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[19]).toEqual value: ')', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.end.bracket.round.css'] + + describe ':nth-*()', -> + it 'tokenizes :nth-child()', -> + tokens = grammar.tokenizeLines ''' + :nth-child(2n+1) + :nth-child(2n -1) + :nth-child(-2n+ 1) + :nth-child(-2n - 1) + :nth-child(odd) + :nth-child(even) + :nth-child( odd ) + :nth-child( even ) + ''' + expect(tokens[0][0]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css'] + expect(tokens[0][1]).toEqual value: 'nth-child', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(tokens[0][2]).toEqual value: '(', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[0][3]).toEqual value: '2n+1', scopes: ['source.css', 'meta.selector.css', 'constant.numeric.css'] + expect(tokens[0][4]).toEqual value: ')', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[1][3]).toEqual value: '2n -1', scopes: ['source.css', 'meta.selector.css', 'constant.numeric.css'] + expect(tokens[2][3]).toEqual value: '-2n+ 1', scopes: ['source.css', 'meta.selector.css', 'constant.numeric.css'] + expect(tokens[3][3]).toEqual value: '-2n - 1', scopes: ['source.css', 'meta.selector.css', 'constant.numeric.css'] + expect(tokens[4][3]).toEqual value: 'odd', scopes: ['source.css', 'meta.selector.css', 'support.constant.parity.css'] + 
expect(tokens[5][3]).toEqual value: 'even', scopes: ['source.css', 'meta.selector.css', 'support.constant.parity.css'] + expect(tokens[6][3]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css'] + expect(tokens[6][4]).toEqual value: 'odd', scopes: ['source.css', 'meta.selector.css', 'support.constant.parity.css'] + expect(tokens[7][4]).toEqual value: 'even', scopes: ['source.css', 'meta.selector.css', 'support.constant.parity.css'] + expect(tokens[7][5]).toEqual value: ' ', scopes: ['source.css', 'meta.selector.css'] + + it 'tokenizes :nth-last-child()', -> + tokens = grammar.tokenizeLines ''' + :nth-last-child(2n) + :nth-last-child( -2n) + :nth-last-child( 2n ) + :nth-last-child(even) + ''' + expect(tokens[0][0]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css'] + expect(tokens[0][1]).toEqual value: 'nth-last-child', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(tokens[0][2]).toEqual value: '(', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[0][3]).toEqual value: '2n', scopes: ['source.css', 'meta.selector.css', 'constant.numeric.css'] + expect(tokens[0][4]).toEqual value: ')', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[1][4]).toEqual value: '-2n', scopes: ['source.css', 'meta.selector.css', 'constant.numeric.css'] + expect(tokens[2][4]).toEqual value: '2n', scopes: ['source.css', 'meta.selector.css', 'constant.numeric.css'] + expect(tokens[2][6]).toEqual value: ')', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[3][3]).toEqual value: 'even', scopes: ['source.css', 'meta.selector.css', 'support.constant.parity.css'] + + it 'tokenizes :nth-of-type()', -> + tokens = grammar.tokenizeLines ''' + img:nth-of-type(+n+1) + img:nth-of-type(-n+1) + img:nth-of-type(n+1) + ''' + expect(tokens[0][1]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css'] + expect(tokens[0][2]).toEqual value: 'nth-of-type', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(tokens[0][3]).toEqual value: '(', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[0][4]).toEqual value: '+n+1', scopes: ['source.css', 'meta.selector.css', 'constant.numeric.css'] + expect(tokens[0][5]).toEqual value: ')', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[1][4]).toEqual value: '-n+1', scopes: ['source.css', 'meta.selector.css', 'constant.numeric.css'] + expect(tokens[2][4]).toEqual value: 'n+1', scopes: ['source.css', 'meta.selector.css', 'constant.numeric.css'] + + it 'tokenizes ::nth-last-of-type()', -> + tokens = grammar.tokenizeLines ''' + h1:nth-last-of-type(-1) + h1:nth-last-of-type(+2) + h1:nth-last-of-type(3) + ''' + expect(tokens[0][1]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css'] + expect(tokens[0][2]).toEqual value: 'nth-last-of-type', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + expect(tokens[0][3]).toEqual value: '(', scopes: 
['source.css', 'meta.selector.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[0][4]).toEqual value: '-1', scopes: ['source.css', 'meta.selector.css', 'constant.numeric.css'] + expect(tokens[0][5]).toEqual value: ')', scopes: ['source.css', 'meta.selector.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[1][4]).toEqual value: '+2', scopes: ['source.css', 'meta.selector.css', 'constant.numeric.css'] + expect(tokens[2][4]).toEqual value: '3', scopes: ['source.css', 'meta.selector.css', 'constant.numeric.css'] + + describe 'pseudo-elements', -> + # :first-line, :first-letter, :before and :after + it 'tokenizes both : and :: notations for pseudo-elements introduced in CSS 1 and 2', -> + {tokens} = grammar.tokenizeLine '.opening:first-letter' + expect(tokens[0]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css'] + expect(tokens[1]).toEqual value: 'opening', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css'] + expect(tokens[2]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css', 'punctuation.definition.entity.css'] + expect(tokens[3]).toEqual value: 'first-letter', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css'] + + {tokens} = grammar.tokenizeLine 'q::after' + expect(tokens[0]).toEqual value: 'q', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[1]).toEqual value: '::', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css', 'punctuation.definition.entity.css'] + expect(tokens[2]).toEqual value: 'after', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css'] + + it 'tokenizes both : and :: notations for vendor-prefixed pseudo-elements', -> + {tokens} = grammar.tokenizeLine ':-ms-input-placeholder' + expect(tokens[0]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css', 'punctuation.definition.entity.css'] + expect(tokens[1]).toEqual value: '-ms-input-placeholder', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css'] + + {tokens} = grammar.tokenizeLine '::-webkit-input-placeholder' + expect(tokens[0]).toEqual value: '::', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css', 'punctuation.definition.entity.css'] + expect(tokens[1]).toEqual value: '-webkit-input-placeholder', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css'] + + it 'only tokenizes the :: notation for other pseudo-elements', -> + {tokens} = grammar.tokenizeLine '::selection' + expect(tokens[0]).toEqual value: '::', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css', 'punctuation.definition.entity.css'] + expect(tokens[1]).toEqual value: 'selection', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css'] + + {tokens} = grammar.tokenizeLine ':selection' + expect(tokens[0]).toEqual value: ':selection', scopes: ['source.css', 'meta.selector.css'] + + describe 'compound selectors', -> + it 'tokenizes the combination of type selectors followed by class selectors', -> + {tokens} = grammar.tokenizeLine 'very-custom.class' + expect(tokens[0]).toEqual value: 'very-custom', scopes: 
['source.css', 'meta.selector.css', 'entity.name.tag.custom.css'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css', 'punctuation.definition.entity.css'] + expect(tokens[2]).toEqual value: 'class', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.class.css'] + + it 'tokenizes the combination of type selectors followed by pseudo-classes', -> + {tokens} = grammar.tokenizeLine 'very-custom:hover' + expect(tokens[0]).toEqual value: 'very-custom', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.custom.css'] + expect(tokens[1]).toEqual value: ':', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css', 'punctuation.definition.entity.css'] + expect(tokens[2]).toEqual value: 'hover', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-class.css'] + + it 'tokenizes the combination of type selectors followed by pseudo-elements', -> + {tokens} = grammar.tokenizeLine 'very-custom::shadow' + expect(tokens[0]).toEqual value: 'very-custom', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.custom.css'] + expect(tokens[1]).toEqual value: '::', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css', 'punctuation.definition.entity.css'] + expect(tokens[2]).toEqual value: 'shadow', scopes: ['source.css', 'meta.selector.css', 'entity.other.attribute-name.pseudo-element.css'] + + describe 'property lists (declaration blocks)', -> + it 'tokenizes inline property lists', -> + {tokens} = grammar.tokenizeLine 'div { font-size: inherit; }' + expect(tokens[4]).toEqual value: 'font-size', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[5]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[6]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[7]).toEqual value: 'inherit', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[8]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[10]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it 'tokenizes compact inline property lists', -> + {tokens} = grammar.tokenizeLine 'div{color:inherit;float:left}' + expect(tokens[2]).toEqual value: 'color', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[3]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[4]).toEqual value: 'inherit', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[5]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[6]).toEqual value: 'float', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[7]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[8]).toEqual value: 'left', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 
'support.constant.property-value.css'] + expect(tokens[9]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it 'tokenizes multiple inline property lists', -> + tokens = grammar.tokenizeLines ''' + very-custom { color: inherit } + another-one { display : none ; } + ''' + expect(tokens[0][0]).toEqual value: 'very-custom', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.custom.css'] + expect(tokens[0][4]).toEqual value: 'color', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[0][5]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[0][7]).toEqual value: 'inherit', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[0][8]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[0][9]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(tokens[1][0]).toEqual value: 'another-one', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.custom.css'] + expect(tokens[1][4]).toEqual value: 'display', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[1][5]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[1][6]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[1][8]).toEqual value: 'none', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[1][9]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[1][10]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[1][12]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it 'tokenizes custom properties', -> + {tokens} = grammar.tokenizeLine ':root { --white: #FFF; }' + expect(tokens[5]).toEqual value: '--white', scopes: ['source.css', 'meta.property-list.css', 'variable.css'] + + it 'tokenises commas between property values', -> + {tokens} = grammar.tokenizeLine('a{ text-shadow: a, b; }') + expect(tokens[7]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'punctuation.separator.list.comma.css'] + + it 'tokenises superfluous semicolons', -> + lines = grammar.tokenizeLines ''' + .test{ width: 20em;;;;;;;;; + ;;;;;;;;;height: 10em; } + ''' + for i in [0..8] + expect(lines[0][i+9]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[1][i]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[1][9]).toEqual value: 'height', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + + describe 'values', -> + it 'tokenizes color keywords', -> + {tokens} = grammar.tokenizeLine '#jon { color: snow; }' + expect(tokens[8]).toEqual value: 'snow', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 
'support.constant.color.w3c-extended-color-name.css'] + + it 'tokenises RGBA values in hex notation', -> + {tokens} = grammar.tokenizeLine('p{ color: #f030; }') + expect(tokens[6]).toEqual value: '#', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.color.rgb-value.hex.css', 'punctuation.definition.constant.css'] + expect(tokens[7]).toEqual value: 'f030', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.color.rgb-value.hex.css'] + expect(tokens[8]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[10]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + {tokens} = grammar.tokenizeLine('a{ color: #CAFEBABE; }') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[1]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[3]).toEqual value: 'color', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[4]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[6]).toEqual value: '#', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.color.rgb-value.hex.css', 'punctuation.definition.constant.css'] + expect(tokens[7]).toEqual value: 'CAFEBABE', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.color.rgb-value.hex.css'] + expect(tokens[8]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[10]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + {tokens} = grammar.tokenizeLine('a{ color: #CAFEBABEF; }') + expect(tokens[6]).toEqual value: '#CAFEBABEF', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css'] + + it 'tokenizes common font names', -> + {tokens} = grammar.tokenizeLine 'p { font-family: Verdana, Helvetica, sans-serif; }' + expect(tokens[7]).toEqual value: 'Verdana', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.font-name.css'] + expect(tokens[10]).toEqual value: 'Helvetica', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.font-name.css'] + expect(tokens[13]).toEqual value: 'sans-serif', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.font-name.css'] + + it 'tokenizes predefined list style types', -> + {tokens} = grammar.tokenizeLine 'ol.myth { list-style-type: cjk-earthly-branch }' + expect(tokens[9]).toEqual value: 'cjk-earthly-branch', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.list-style-type.css'] + + it 'tokenizes numeric values', -> + {tokens} = grammar.tokenizeLine 'div { font-size: 14px; }' + expect(tokens[7]).toEqual value: '14', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(tokens[8]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + + 
it 'does not tokenize invalid numeric values', -> + {tokens} = grammar.tokenizeLine 'div { font-size: test14px; }' + expect(tokens[7]).toEqual value: 'test14px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css'] + + {tokens} = grammar.tokenizeLine 'div { font-size: test-14px; }' + expect(tokens[7]).toEqual value: 'test-14px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css'] + + it 'tokenizes vendor-prefixed values', -> + {tokens} = grammar.tokenizeLine '.edge { cursor: -webkit-zoom-in; }' + expect(tokens[8]).toEqual value: '-webkit-zoom-in', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.vendored.property-value.css'] + + {tokens} = grammar.tokenizeLine '.edge { width: -moz-min-content; }' + expect(tokens[8]).toEqual value: '-moz-min-content', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.vendored.property-value.css'] + + {tokens} = grammar.tokenizeLine '.edge { display: -ms-grid; }' + expect(tokens[8]).toEqual value: '-ms-grid', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.vendored.property-value.css'] + + it 'tokenizes custom variables', -> + {tokens} = grammar.tokenizeLine 'div { color: var(--primary-color) }' + expect(tokens[9]).toEqual value: '--primary-color', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'variable.argument.css'] + + it 'tokenises numeric values correctly', -> + lines = grammar.tokenizeLines """ + .a { a: 12em } + .a { a: 4.01ex } + .a { a: -456.8ch } + .a { a: 0.0REM } + .a { a: +0.0vh } + .a { a: -0.0vw } + .a { a: .6px } + .a { a: 10e3mm } + .a { a: 10E3cm } + .a { a: -3.4e+2In } + .a { a: -3.4e-2ch } + .a { a: +.5E-2% } + .a { a: -3.4e-2% } + """ + expect(lines[0][8]).toEqual value: '12', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[0][9]).toEqual value: 'em', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.em.css'] + expect(lines[1][8]).toEqual value: '4.01', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[1][9]).toEqual value: 'ex', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.ex.css'] + expect(lines[2][8]).toEqual value: '-456.8', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[2][9]).toEqual value: 'ch', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.ch.css'] + expect(lines[3][8]).toEqual value: '0.0', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[3][9]).toEqual value: 'REM', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.rem.css'] + expect(lines[4][8]).toEqual value: '+0.0', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[4][9]).toEqual value: 'vh', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.vh.css'] + expect(lines[5][8]).toEqual value: '-0.0', scopes: ['source.css', 'meta.property-list.css', 
'meta.property-value.css', 'constant.numeric.css'] + expect(lines[5][9]).toEqual value: 'vw', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.vw.css'] + expect(lines[6][8]).toEqual value: '.6', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[6][9]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[7][8]).toEqual value: '10e3', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[7][9]).toEqual value: 'mm', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.mm.css'] + expect(lines[8][8]).toEqual value: '10E3', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[8][9]).toEqual value: 'cm', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.cm.css'] + expect(lines[9][8]).toEqual value: '-3.4e+2', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[9][9]).toEqual value: 'In', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.in.css'] + expect(lines[10][8]).toEqual value: '-3.4e-2', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[10][9]).toEqual value: 'ch', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.ch.css'] + expect(lines[11][8]).toEqual value: '+.5E-2', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[11][9]).toEqual value: '%', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.percentage.css'] + expect(lines[12][8]).toEqual value: '-3.4e-2', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[12][9]).toEqual value: '%', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.percentage.css'] + + describe 'functional notation', -> + describe 'attr()', -> + it 'tokenises parameters correctly and case-insensitively', -> + {tokens} = grammar.tokenizeLine('a{content:aTTr(data-width px, inherit)}') + expect(tokens[4]).toEqual value: 'aTTr', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'support.function.misc.css'] + expect(tokens[5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[6]).toEqual value: 'data-width', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'variable.parameter.misc.css'] + expect(tokens[8]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'variable.parameter.misc.css'] + expect(tokens[9]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 
'punctuation.separator.list.comma.css'] + expect(tokens[11]).toEqual value: 'inherit', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'support.constant.property-value.css'] + expect(tokens[12]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[13]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it 'matches variables', -> + {tokens} = grammar.tokenizeLine('a{content:ATTR(VAR(--name) px, "N/A")}') + expect(tokens[4]).toEqual value: 'ATTR', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'support.function.misc.css'] + expect(tokens[5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[6]).toEqual value: 'VAR', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'meta.function.variable.css', 'support.function.misc.css'] + expect(tokens[7]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'meta.function.variable.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[8]).toEqual value: '--name', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'meta.function.variable.css', 'variable.argument.css'] + expect(tokens[9]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'meta.function.variable.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[11]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'variable.parameter.misc.css'] + expect(tokens[12]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.separator.list.comma.css'] + expect(tokens[14]).toEqual value: '"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[15]).toEqual value: 'N/A', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'string.quoted.double.css'] + expect(tokens[16]).toEqual value: '"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[17]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.end.bracket.round.css'] + + describe 'calc()', -> + it 'tokenises calculations', -> + lines = grammar.tokenizeLines """ + a{ + width: calc(3px + -1em); + width: calc(3px - -1em); + width: calc(3px * 2); + width: calc(3px / 2); + } + """ + expect(lines[1][4]).toEqual value: 'calc', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'support.function.calc.css'] + expect(lines[1][5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 
'meta.property-value.css', 'meta.function.calc.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[1][6]).toEqual value: '3', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css'] + expect(lines[1][7]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[1][9]).toEqual value: '+', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'keyword.operator.arithmetic.css'] + expect(lines[1][11]).toEqual value: '-1', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css'] + expect(lines[1][12]).toEqual value: 'em', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css', 'keyword.other.unit.em.css'] + expect(lines[1][13]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[2][9]).toEqual value: '-', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'keyword.operator.arithmetic.css'] + expect(lines[2][11]).toEqual value: '-1', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css'] + expect(lines[2][12]).toEqual value: 'em', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css', 'keyword.other.unit.em.css'] + expect(lines[3][7]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[3][9]).toEqual value: '*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'keyword.operator.arithmetic.css'] + expect(lines[4][7]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[4][9]).toEqual value: '/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'keyword.operator.arithmetic.css'] + expect(lines[4][11]).toEqual value: '2', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css'] + + it 'requires whitespace around + and - operators', -> + {tokens} = grammar.tokenizeLine('a{ width: calc(3px+1em); }') + expect(tokens[9]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(tokens[10]).toEqual value: '+', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css'] + expect(tokens[11]).toEqual value: '1', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css'] + expect(tokens[12]).toEqual value: 'em', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css', 'keyword.other.unit.em.css'] + + {tokens} = 
grammar.tokenizeLine('a{ width: calc(3px--1em); height: calc(10-1em);}') + expect(tokens[9]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(tokens[10]).toEqual value: '--1em', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css'] + expect(tokens[19]).toEqual value: '10', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css'] + expect(tokens[20]).toEqual value: '-1em', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css'] + + it 'does not require whitespace around * and / operators', -> + {tokens} = grammar.tokenizeLine('a{ width: calc(3px*2); }') + expect(tokens[9]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(tokens[10]).toEqual value: '*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'keyword.operator.arithmetic.css'] + expect(tokens[11]).toEqual value: '2', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css'] + + {tokens} = grammar.tokenizeLine('a{ width: calc(3px/2); }') + expect(tokens[9]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(tokens[10]).toEqual value: '/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'keyword.operator.arithmetic.css'] + expect(tokens[11]).toEqual value: '2', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css'] + + it 'matches variable expansions inside calculations', -> + {tokens} = grammar.tokenizeLine('.foo { margin-top: calc(var(--gap) + 1px); }') + expect(tokens[8]).toEqual value: 'calc', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'support.function.calc.css'] + expect(tokens[9]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[10]).toEqual value: 'var', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'meta.function.variable.css', 'support.function.misc.css'] + expect(tokens[11]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'meta.function.variable.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[12]).toEqual value: '--gap', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'meta.function.variable.css', 'variable.argument.css'] + expect(tokens[13]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'meta.function.variable.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[15]).toEqual value: '+', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 
'keyword.operator.arithmetic.css'] + expect(tokens[17]).toEqual value: '1', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css'] + expect(tokens[18]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(tokens[19]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.calc.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[20]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[22]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + describe 'colours', -> + it 'tokenises colour functions correctly', -> + {tokens} = grammar.tokenizeLine('a{ color: rgb(187,255,221); }') + expect(tokens[6]).toEqual value: 'rgb', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'support.function.misc.css'] + expect(tokens[7]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[8]).toEqual value: '187', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[9]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(tokens[10]).toEqual value: '255', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[11]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(tokens[12]).toEqual value: '221', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[13]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.section.function.end.bracket.round.css'] + + {tokens} = grammar.tokenizeLine('a{ color: RGBa( 100%, 0% ,20.17% ,.5 ); }') + expect(tokens[6]).toEqual value: 'RGBa', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'support.function.misc.css'] + expect(tokens[7]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[9]).toEqual value: '100', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[10]).toEqual value: '%', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css', 'keyword.other.unit.percentage.css'] + expect(tokens[11]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(tokens[13]).toEqual value: '0', scopes: ['source.css', 
'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[14]).toEqual value: '%', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css', 'keyword.other.unit.percentage.css'] + expect(tokens[16]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(tokens[17]).toEqual value: '20.17', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[18]).toEqual value: '%', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css', 'keyword.other.unit.percentage.css'] + expect(tokens[20]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(tokens[21]).toEqual value: '.5', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[23]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.section.function.end.bracket.round.css'] + + {tokens} = grammar.tokenizeLine('a{color:HSL(0, 00100%,50%)}') + expect(tokens[4]).toEqual value: 'HSL', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'support.function.misc.css'] + expect(tokens[5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[6]).toEqual value: '0', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[7]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(tokens[9]).toEqual value: '00100', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[10]).toEqual value: '%', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css', 'keyword.other.unit.percentage.css'] + expect(tokens[11]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(tokens[12]).toEqual value: '50', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[13]).toEqual value: '%', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css', 'keyword.other.unit.percentage.css'] + expect(tokens[14]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.section.function.end.bracket.round.css'] + + {tokens} = grammar.tokenizeLine('a{color:HSLa(2,.0%,1%,.7)}') + expect(tokens[4]).toEqual value: 'HSLa', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 
'support.function.misc.css'] + expect(tokens[5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[6]).toEqual value: '2', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[7]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(tokens[8]).toEqual value: '.0', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[9]).toEqual value: '%', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css', 'keyword.other.unit.percentage.css'] + expect(tokens[10]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(tokens[11]).toEqual value: '1', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[12]).toEqual value: '%', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css', 'keyword.other.unit.percentage.css'] + expect(tokens[13]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(tokens[14]).toEqual value: '.7', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[15]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.section.function.end.bracket.round.css'] + + it 'matches variables as colour components', -> + {tokens} = grammar.tokenizeLine('a{ color: RGBA(var(--red), 0% , 20%, .2)}') + expect(tokens[7]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[8]).toEqual value: 'var', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'support.function.misc.css'] + expect(tokens[9]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[10]).toEqual value: '--red', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'variable.argument.css'] + expect(tokens[11]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[12]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + + it 'matches comments between colour components', -> + {tokens} = grammar.tokenizeLine('a{ color: 
rgba(/**/255/*=*/,0,/*2.2%*/51/*,*/0.2)}') + expect(tokens[8]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[9]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[10]).toEqual value: '255', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[11]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[12]).toEqual value: '=', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css'] + expect(tokens[13]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[14]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(tokens[16]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(tokens[17]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[19]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[20]).toEqual value: '51', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[21]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[22]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css'] + expect(tokens[23]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[24]).toEqual value: '0.2', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'constant.numeric.css'] + + it 'allows colour components to be split across lines', -> + lines = grammar.tokenizeLines """ + .frost{ + background-color: rgba( + var(--red), /* Red */ + var(--green), /* Green */ + var(--blue), /* Blue */ + /* var(--test), + /**/var(--opacity) /* Transparency */ + ); + } + """ + expect(lines[1][4]).toEqual value: 'rgba', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'support.function.misc.css'] + expect(lines[1][5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 
'punctuation.section.function.begin.bracket.round.css'] + expect(lines[2][1]).toEqual value: 'var', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'support.function.misc.css'] + expect(lines[2][2]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[2][3]).toEqual value: '--red', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'variable.argument.css'] + expect(lines[2][4]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[2][8]).toEqual value: ' Red ', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css'] + expect(lines[3][1]).toEqual value: 'var', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'support.function.misc.css'] + expect(lines[3][3]).toEqual value: '--green', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'variable.argument.css'] + expect(lines[3][5]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(lines[3][8]).toEqual value: ' Green ', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css'] + expect(lines[4][1]).toEqual value: 'var', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'support.function.misc.css'] + expect(lines[4][2]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[4][3]).toEqual value: '--blue', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'variable.argument.css'] + expect(lines[4][4]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[4][5]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(lines[4][8]).toEqual value: ' Blue ', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css'] + expect(lines[4][9]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[5][1]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[5][2]).toEqual value: ' 
var(--test),', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css'] + expect(lines[6][0]).toEqual value: ' /*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css'] + expect(lines[6][1]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[6][2]).toEqual value: 'var', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'support.function.misc.css'] + expect(lines[6][3]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[6][4]).toEqual value: '--opacity', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'variable.argument.css'] + expect(lines[6][5]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'meta.function.variable.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[6][7]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[6][8]).toEqual value: ' Transparency ', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css'] + expect(lines[6][9]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[7][1]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.color.css', 'punctuation.section.function.end.bracket.round.css'] + + describe 'gradients', -> + it 'tokenises linear gradients', -> + {tokens} = grammar.tokenizeLine('a{ background-image: linear-gradient( 45deg, blue, red ); }') + expect(tokens[6]).toEqual value: 'linear-gradient', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.function.gradient.css'] + expect(tokens[7]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[9]).toEqual value: '45', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.numeric.css'] + expect(tokens[10]).toEqual value: 'deg', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.numeric.css', 'keyword.other.unit.deg.css'] + expect(tokens[11]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.separator.list.comma.css'] + expect(tokens[13]).toEqual value: 'blue', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.constant.color.w3c-standard-color-name.css'] + expect(tokens[14]).toEqual value: ',', 
scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.separator.list.comma.css'] + expect(tokens[16]).toEqual value: 'red', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.constant.color.w3c-standard-color-name.css'] + expect(tokens[18]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[19]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + + {tokens} = grammar.tokenizeLine('a{ background-image: LINear-graDIEnt( ellipse to left top, blue, red);') + expect(tokens[6]).toEqual value: 'LINear-graDIEnt', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.function.gradient.css'] + expect(tokens[9]).toEqual value: 'ellipse', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.constant.property-value.css'] + expect(tokens[11]).toEqual value: 'to', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'keyword.operator.gradient.css'] + expect(tokens[13]).toEqual value: 'left', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.constant.property-value.css'] + expect(tokens[15]).toEqual value: 'top', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.constant.property-value.css'] + expect(tokens[16]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.separator.list.comma.css'] + expect(tokens[18]).toEqual value: 'blue', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.constant.color.w3c-standard-color-name.css'] + expect(tokens[19]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.separator.list.comma.css'] + expect(tokens[21]).toEqual value: 'red', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.constant.color.w3c-standard-color-name.css'] + + it 'tokenises radial gradients', -> + {tokens} = grammar.tokenizeLine('a{ background-image: radial-gradient(farthest-corner at 45px 45px , #f00 0%, #00f 100%);}') + expect(tokens[6]).toEqual value: 'radial-gradient', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.function.gradient.css'] + expect(tokens[7]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[8]).toEqual value: 'farthest-corner', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.constant.property-value.css'] + expect(tokens[10]).toEqual value: 'at', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'keyword.operator.gradient.css'] + expect(tokens[12]).toEqual value: '45', scopes: ['source.css', 'meta.property-list.css', 
'meta.property-value.css', 'meta.function.gradient.css', 'constant.numeric.css'] + expect(tokens[13]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(tokens[15]).toEqual value: '45', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.numeric.css'] + expect(tokens[16]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(tokens[18]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.separator.list.comma.css'] + expect(tokens[20]).toEqual value: '#', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.other.color.rgb-value.hex.css', 'punctuation.definition.constant.css'] + expect(tokens[21]).toEqual value: 'f00', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.other.color.rgb-value.hex.css'] + expect(tokens[23]).toEqual value: '0', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.numeric.css'] + expect(tokens[24]).toEqual value: '%', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.numeric.css', 'keyword.other.unit.percentage.css'] + + {tokens} = grammar.tokenizeLine('a{ background-image: RADial-gradiENT(16px at 60px 50%,#000 0%, #000 14px, rgba(0,0,0,.3) 18px, transparent 19px)}') + expect(tokens[6]).toEqual value: 'RADial-gradiENT', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.function.gradient.css'] + expect(tokens[7]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[8]).toEqual value: '16', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.numeric.css'] + expect(tokens[9]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(tokens[11]).toEqual value: 'at', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'keyword.operator.gradient.css'] + expect(tokens[13]).toEqual value: '60', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.numeric.css'] + expect(tokens[14]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(tokens[16]).toEqual value: '50', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.numeric.css'] + expect(tokens[17]).toEqual value: '%', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.numeric.css', 'keyword.other.unit.percentage.css'] + expect(tokens[18]).toEqual value: ',', scopes: 
['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.separator.list.comma.css'] + expect(tokens[19]).toEqual value: '#', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.other.color.rgb-value.hex.css', 'punctuation.definition.constant.css'] + expect(tokens[20]).toEqual value: '000', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.other.color.rgb-value.hex.css'] + expect(tokens[33]).toEqual value: 'rgba', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'support.function.misc.css'] + expect(tokens[34]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[35]).toEqual value: '0', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[36]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(tokens[41]).toEqual value: '.3', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(tokens[42]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[48]).toEqual value: 'transparent', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.constant.property-value.css'] + + it 'matches gradients that span multiple lines with injected comments', -> + lines = grammar.tokenizeLines """ + a{ + background-image: raDIAL-gradiENT( + ellipse farthest-corner/*@*/at/*@*/470px 47px,/*=== + ========*/#FFFF80 20%, rgba(204, 153, 153, 0.4) 30%,/*))))))))}*/#E6E6FF 60%); } + """ + expect(lines[1][4]).toEqual value: 'raDIAL-gradiENT', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.function.gradient.css'] + expect(lines[2][1]).toEqual value: 'ellipse', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.constant.property-value.css'] + expect(lines[2][3]).toEqual value: 'farthest-corner', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.constant.property-value.css'] + expect(lines[2][4]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[2][5]).toEqual value: '@', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'comment.block.css'] + expect(lines[2][6]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[2][7]).toEqual value: 'at', 
scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'keyword.operator.gradient.css'] + expect(lines[2][8]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[2][11]).toEqual value: '470', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.numeric.css'] + expect(lines[2][12]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(lines[2][16]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.separator.list.comma.css'] + expect(lines[2][17]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[2][18]).toEqual value: '===', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'comment.block.css'] + expect(lines[3][0]).toEqual value: '========', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'comment.block.css'] + expect(lines[3][2]).toEqual value: '#', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.other.color.rgb-value.hex.css', 'punctuation.definition.constant.css'] + expect(lines[3][3]).toEqual value: 'FFFF80', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.other.color.rgb-value.hex.css'] + expect(lines[3][9]).toEqual value: 'rgba', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'support.function.misc.css'] + expect(lines[3][10]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[3][20]).toEqual value: '0.4', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(lines[3][21]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[3][26]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[3][27]).toEqual value: '))))))))}', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'comment.block.css'] + expect(lines[3][28]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[3][29]).toEqual value: '#', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 
'constant.other.color.rgb-value.hex.css', 'punctuation.definition.constant.css'] + expect(lines[3][30]).toEqual value: 'E6E6FF', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.other.color.rgb-value.hex.css'] + + it 'highlights vendored gradient functions', -> + lines = grammar.tokenizeLines """ + .grad { + background-image: -webkit-linear-gradient(top, /* For Chrome 25 and Safari 6, iOS 6.1, Android 4.3 */ hsl(0, 80%, 70%), #bada55); + background-image: -moz-linear-gradient(top, /* For Firefox (3.6 to 15) */ hsl(0, 80%, 70%), #bada55); + background-image: -o-linear-gradient(top, /* For old Opera (11.1 to 12.0) */ hsl(0, 80%, 70%), #bada55); + } + """ + expect(lines[1][4]).toEqual value: '-webkit-linear-gradient', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.function.gradient.css'] + expect(lines[1][5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[1][6]).toEqual value: 'top', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.constant.property-value.css'] + expect(lines[1][10]).toEqual value: ' For Chrome 25 and Safari 6, iOS 6.1, Android 4.3 ', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'comment.block.css'] + expect(lines[1][13]).toEqual value: 'hsl', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'support.function.misc.css'] + expect(lines[1][22]).toEqual value: '70', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(lines[1][23]).toEqual value: '%', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'constant.numeric.css', 'keyword.other.unit.percentage.css'] + expect(lines[1][24]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[1][27]).toEqual value: '#', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.other.color.rgb-value.hex.css', 'punctuation.definition.constant.css'] + expect(lines[1][28]).toEqual value: 'bada55', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'constant.other.color.rgb-value.hex.css'] + expect(lines[1][29]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[2][4]).toEqual value: '-moz-linear-gradient', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.function.gradient.css'] + expect(lines[2][5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[2][6]).toEqual value: 'top', scopes: ['source.css', 'meta.property-list.css', 
'meta.property-value.css', 'meta.function.gradient.css', 'support.constant.property-value.css'] + expect(lines[2][7]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.separator.list.comma.css'] + expect(lines[2][10]).toEqual value: ' For Firefox (3.6 to 15) ', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'comment.block.css'] + expect(lines[2][13]).toEqual value: 'hsl', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'support.function.misc.css'] + expect(lines[2][14]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[2][24]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[2][29]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[3][4]).toEqual value: '-o-linear-gradient', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'support.function.gradient.css'] + expect(lines[3][5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[3][10]).toEqual value: ' For old Opera (11.1 to 12.0) ', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'comment.block.css'] + expect(lines[3][13]).toEqual value: 'hsl', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'support.function.misc.css'] + expect(lines[3][14]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.css', 'meta.function.color.css', 'punctuation.section.function.begin.bracket.round.css'] + + it 'highlights antique Webkit syntax as deprecated', -> + lines = grammar.tokenizeLines """ + .grad { + background-image: -webkit-gradient(linear, 0% 0%, 0% 100%, + from( rgb(0, 171, 235)), + color-stop(0.5, rgb(255, 255, 255)), + color-stop(0.5, rgb(102, 204, 0)), + to(rgb(255, 255, 255))), + -webkit-gradient(radial, 45 45, 10, 52 50, 30, from(#A7D30C), to(rgba(1,159,98,0)), color-stop(90%, #019F62)), + -webkit-gradient(radial, 105 105, 20, 112 120, 50, from(#ff5f98), to(rgba(255,1,136,0)), color-stop(75%, #ff0188)), + -webkit-gradient(radial, 95 15, 15, 102 20, 40, from(#00c9ff), to(rgba(0,201,255,0)), color-stop(80%, #00b5e2)), + -webkit-gradient(radial, 0 150, 50, 0 140, 90, from(#f4f201), to(rgba(228, 199,0,0)), color-stop(80%, #e4c700)); + } + """ + expect(lines[1][4]).toEqual value: '-webkit-gradient', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'invalid.deprecated.gradient.function.css'] + expect(lines[1][5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 
'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[1][6]).toEqual value: 'linear', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'support.constant.property-value.css'] + expect(lines[1][7]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.separator.list.comma.css'] + expect(lines[1][19]).toEqual value: '100', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'constant.numeric.css'] + expect(lines[1][20]).toEqual value: '%', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'constant.numeric.css', 'keyword.other.unit.percentage.css'] + expect(lines[2][1]).toEqual value: 'from', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'invalid.deprecated.function.css'] + expect(lines[2][2]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[2][4]).toEqual value: 'rgb', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'meta.function.color.css', 'support.function.misc.css'] + expect(lines[2][5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'meta.function.color.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[2][9]).toEqual value: '171', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(lines[2][10]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(lines[2][14]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[3][1]).toEqual value: 'color-stop', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'invalid.deprecated.function.css'] + expect(lines[3][2]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[3][3]).toEqual value: '0.5', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'constant.numeric.css'] + expect(lines[3][4]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.separator.list.comma.css'] + expect(lines[3][16]).toEqual value: ')', scopes: 
['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[3][17]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.separator.list.comma.css'] + expect(lines[4][1]).toEqual value: 'color-stop', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'invalid.deprecated.function.css'] + expect(lines[4][2]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[4][3]).toEqual value: '0.5', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'constant.numeric.css'] + expect(lines[4][4]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.separator.list.comma.css'] + expect(lines[4][6]).toEqual value: 'rgb', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'meta.function.color.css', 'support.function.misc.css'] + expect(lines[4][7]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'meta.function.color.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[4][8]).toEqual value: '102', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(lines[4][9]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(lines[4][11]).toEqual value: '204', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(lines[4][12]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'meta.function.color.css', 'punctuation.separator.list.comma.css'] + expect(lines[4][14]).toEqual value: '0', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'meta.function.color.css', 'constant.numeric.css'] + expect(lines[4][15]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'meta.function.color.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[4][16]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[4][17]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 
'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.separator.list.comma.css'] + expect(lines[5][1]).toEqual value: 'to', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'invalid.deprecated.function.css'] + expect(lines[5][2]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[5][12]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'meta.function.color.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[5][13]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[5][14]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[5][15]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'punctuation.separator.list.comma.css'] + expect(lines[6][1]).toEqual value: '-webkit-gradient', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'invalid.deprecated.gradient.function.css'] + expect(lines[6][2]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[6][3]).toEqual value: 'radial', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'support.constant.property-value.css'] + expect(lines[6][4]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.separator.list.comma.css'] + expect(lines[6][8]).toEqual value: '45', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'constant.numeric.css'] + expect(lines[6][31]).toEqual value: 'rgba', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'meta.function.color.css', 'support.function.misc.css'] + expect(lines[7][1]).toEqual value: '-webkit-gradient', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'invalid.deprecated.gradient.function.css'] + expect(lines[7][2]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[9][1]).toEqual value: '-webkit-gradient', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'invalid.deprecated.gradient.function.css'] + expect(lines[9][2]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 
'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[9][3]).toEqual value: 'radial', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'support.constant.property-value.css'] + expect(lines[9][4]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.separator.list.comma.css'] + expect(lines[9][6]).toEqual value: '0', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'constant.numeric.css'] + expect(lines[9][8]).toEqual value: '150', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'constant.numeric.css'] + expect(lines[9][54]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.gradient.invalid.deprecated.gradient.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[9][55]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[10][0]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + describe 'other functions', -> + it 'tokenises basic-shape functions', -> + lines = grammar.tokenizeLines """ + a{ + shape-outside: circle(20em/*=*/at 50% 50%); + shape-outside: inset(1em, 1em, 1em, 1em); + } + """ + expect(lines[1][4]).toEqual value: 'circle', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'support.function.shape.css'] + expect(lines[1][5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[1][6]).toEqual value: '20', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'constant.numeric.css'] + expect(lines[1][7]).toEqual value: 'em', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'constant.numeric.css', 'keyword.other.unit.em.css'] + expect(lines[1][8]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[1][9]).toEqual value: '=', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'comment.block.css'] + expect(lines[1][10]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(lines[1][11]).toEqual value: 'at', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'keyword.operator.shape.css'] + expect(lines[1][13]).toEqual value: '50', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'constant.numeric.css'] + expect(lines[1][14]).toEqual value: '%', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 
'constant.numeric.css', 'keyword.other.unit.percentage.css'] + expect(lines[1][16]).toEqual value: '50', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'constant.numeric.css'] + expect(lines[1][17]).toEqual value: '%', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'constant.numeric.css', 'keyword.other.unit.percentage.css'] + expect(lines[1][18]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[2][4]).toEqual value: 'inset', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'support.function.shape.css'] + expect(lines[2][5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[2][6]).toEqual value: '1', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'constant.numeric.css'] + expect(lines[2][7]).toEqual value: 'em', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'constant.numeric.css', 'keyword.other.unit.em.css'] + expect(lines[2][8]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'punctuation.separator.list.comma.css'] + expect(lines[2][10]).toEqual value: '1', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'constant.numeric.css'] + expect(lines[2][11]).toEqual value: 'em', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'constant.numeric.css', 'keyword.other.unit.em.css'] + expect(lines[2][12]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'punctuation.separator.list.comma.css'] + expect(lines[2][14]).toEqual value: '1', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'constant.numeric.css'] + expect(lines[2][15]).toEqual value: 'em', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'constant.numeric.css', 'keyword.other.unit.em.css'] + expect(lines[2][16]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'punctuation.separator.list.comma.css'] + expect(lines[2][18]).toEqual value: '1', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'constant.numeric.css'] + expect(lines[2][19]).toEqual value: 'em', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'constant.numeric.css', 'keyword.other.unit.em.css'] + expect(lines[2][20]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.shape.css', 'punctuation.section.function.end.bracket.round.css'] + + it 'tokenises OpenType feature functions', -> + lines = grammar.tokenizeLines """ + .font{ + font-variant-alternates: stylistic(user-defined-ident); + font-variant-alternates: styleset(user-defined-ident); + font-variant-alternates: character-variant(user-defined-ident); + 
font-variant-alternates: swash(user-defined-ident); + font-variant-alternates: ornaments(user-defined-ident); + font-variant-alternates: annotation(user-defined-ident); + font-variant-alternates: swash(ident1) annotation(ident2); + } + """ + expect(lines[1][4]).toEqual value: 'stylistic', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'support.function.misc.css'] + expect(lines[1][5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[1][6]).toEqual value: 'user-defined-ident', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'variable.parameter.misc.css'] + expect(lines[1][7]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[2][4]).toEqual value: 'styleset', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'support.function.misc.css'] + expect(lines[2][5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[2][6]).toEqual value: 'user-defined-ident', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'variable.parameter.misc.css'] + expect(lines[2][7]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[3][4]).toEqual value: 'character-variant', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'support.function.misc.css'] + expect(lines[3][5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[3][6]).toEqual value: 'user-defined-ident', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'variable.parameter.misc.css'] + expect(lines[3][7]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[4][4]).toEqual value: 'swash', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'support.function.misc.css'] + expect(lines[4][5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[4][6]).toEqual value: 'user-defined-ident', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'variable.parameter.misc.css'] + expect(lines[4][7]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[5][4]).toEqual value: 'ornaments', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'support.function.misc.css'] + expect(lines[5][5]).toEqual value: '(', 
scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[5][6]).toEqual value: 'user-defined-ident', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'variable.parameter.misc.css'] + expect(lines[5][7]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[6][4]).toEqual value: 'annotation', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'support.function.misc.css'] + expect(lines[6][5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[6][6]).toEqual value: 'user-defined-ident', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'variable.parameter.misc.css'] + expect(lines[6][7]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[7][4]).toEqual value: 'swash', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'support.function.misc.css'] + expect(lines[7][5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[7][6]).toEqual value: 'ident1', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'variable.parameter.misc.css'] + expect(lines[7][7]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.end.bracket.round.css'] + expect(lines[7][9]).toEqual value: 'annotation', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'support.function.misc.css'] + expect(lines[7][10]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[7][11]).toEqual value: 'ident2', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'variable.parameter.misc.css'] + expect(lines[7][12]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.end.bracket.round.css'] + + it 'tokenises image-set()', -> + lines = grammar.tokenizeLines """ + a{ + background-image: image-set( "foo.png" 1x, + "foo-2x.png" 2x, + "foo-print.png" 600dpi ); + } + """ + expect(lines[0][0]).toEqual value: 'a', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[0][1]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(lines[1][1]).toEqual value: 'background-image', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[1][2]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 
'punctuation.separator.key-value.css'] + expect(lines[1][4]).toEqual value: 'image-set', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'support.function.misc.css'] + expect(lines[1][5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(lines[1][7]).toEqual value: '"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(lines[1][8]).toEqual value: 'foo.png', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'string.quoted.double.css'] + expect(lines[1][9]).toEqual value: '"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(lines[1][11]).toEqual value: '1x', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'constant.numeric.other.density.css'] + expect(lines[1][12]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.separator.list.comma.css'] + expect(lines[2][1]).toEqual value: '"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(lines[2][2]).toEqual value: 'foo-2x.png', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'string.quoted.double.css'] + expect(lines[2][3]).toEqual value: '"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(lines[2][5]).toEqual value: '2x', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'constant.numeric.other.density.css'] + expect(lines[2][6]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.separator.list.comma.css'] + expect(lines[3][1]).toEqual value: '"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(lines[3][2]).toEqual value: 'foo-print.png', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'string.quoted.double.css'] + expect(lines[3][3]).toEqual value: '"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(lines[3][5]).toEqual value: '600', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'constant.numeric.css'] + expect(lines[3][6]).toEqual value: 'dpi', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'constant.numeric.css', 'keyword.other.unit.dpi.css'] + expect(lines[3][8]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.misc.css', 'punctuation.section.function.end.bracket.round.css'] + 
expect(lines[3][9]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[4][0]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + describe 'timing-functions', -> + it 'tokenises them correctly', -> + {tokens} = grammar.tokenizeLine('a{ zoom: cubic-bezier(/**/1.2,/*=*/0,0,0/**/)}') + expect(tokens[6]).toEqual value: 'cubic-bezier', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'support.function.timing-function.css'] + expect(tokens[7]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[8]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[9]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[10]).toEqual value: '1.2', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'constant.numeric.css'] + expect(tokens[11]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'punctuation.separator.list.comma.css'] + expect(tokens[12]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[13]).toEqual value: '=', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'comment.block.css'] + expect(tokens[14]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[15]).toEqual value: '0', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'constant.numeric.css'] + expect(tokens[16]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'punctuation.separator.list.comma.css'] + expect(tokens[17]).toEqual value: '0', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'constant.numeric.css'] + expect(tokens[18]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'punctuation.separator.list.comma.css'] + expect(tokens[19]).toEqual value: '0', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'constant.numeric.css'] + expect(tokens[20]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[21]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 
'meta.function.timing-function.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[22]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'punctuation.section.function.end.bracket.round.css'] + + it 'highlights the "start" and "end" keywords', -> + {tokens} = grammar.tokenizeLine('a{ before: steps(0, start); after: steps(1, end); }') + expect(tokens[6]).toEqual value: 'steps', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'support.function.timing-function.css'] + expect(tokens[7]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[8]).toEqual value: '0', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'constant.numeric.css'] + expect(tokens[9]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'punctuation.separator.list.comma.css'] + expect(tokens[11]).toEqual value: 'start', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'support.constant.step-direction.css'] + expect(tokens[12]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[23]).toEqual value: 'end', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.timing-function.css', 'support.constant.step-direction.css'] + + describe 'variables', -> + it 'scopes var() statements as variables', -> + {tokens} = grammar.tokenizeLine('a{color: var(--name)}') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[1]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[2]).toEqual value: 'color', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[3]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[5]).toEqual value: 'var', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'support.function.misc.css'] + expect(tokens[6]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[7]).toEqual value: '--name', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'variable.argument.css'] + expect(tokens[8]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[9]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + {tokens} = grammar.tokenizeLine('a{color: var( --name )}') + expect(tokens[5]).toEqual value: 'var', scopes: ['source.css', 
'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'support.function.misc.css'] + expect(tokens[6]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[8]).toEqual value: '--name', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'variable.argument.css'] + expect(tokens[10]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'punctuation.section.function.end.bracket.round.css'] + + it 'allows injected comments', -> + {tokens} = grammar.tokenizeLine('a{ color: var( /*=*/ --something ) }') + expect(tokens[6]).toEqual value: 'var', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'support.function.misc.css'] + expect(tokens[7]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[9]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[10]).toEqual value: '=', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'comment.block.css'] + expect(tokens[11]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[13]).toEqual value: '--something', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'variable.argument.css'] + expect(tokens[15]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'punctuation.section.function.end.bracket.round.css'] + + it 'tokenises fallback values', -> + {tokens} = grammar.tokenizeLine('.bar{ width: var(--page-width, /*;;;);*/ 2); }') + expect(tokens[7]).toEqual value: 'var', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'support.function.misc.css'] + expect(tokens[8]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[9]).toEqual value: '--page-width', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'variable.argument.css'] + expect(tokens[10]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'punctuation.separator.list.comma.css'] + expect(tokens[12]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[13]).toEqual value: ';;;);', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'comment.block.css'] + expect(tokens[14]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 
'meta.function.variable.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[16]).toEqual value: '2', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'constant.numeric.css'] + expect(tokens[17]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'meta.function.variable.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[18]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + + it 'does not tokenise functions with whitespace between name and parameters', -> + {tokens} = grammar.tokenizeLine('a{ p: attr (title); }') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[1]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[3]).toEqual value: 'p', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css'] + expect(tokens[4]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[6]).toEqual value: 'attr (title', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css'] + expect(tokens[7]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[8]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[10]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + {tokens} = grammar.tokenizeLine('a{url:url (s)}') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[1]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[2]).toEqual value: 'url', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css'] + expect(tokens[3]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[4]).toEqual value: 'url (s', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css'] + expect(tokens[5]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[6]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + {tokens} = grammar.tokenizeLine('a{content:url ("http://github.com/");}') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[1]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[2]).toEqual value: 'content', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[3]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[4]).toEqual value: 'url (', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css'] + expect(tokens[5]).toEqual value: '"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 
'punctuation.definition.string.begin.css'] + expect(tokens[6]).toEqual value: 'http://github.com/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css'] + expect(tokens[7]).toEqual value: '"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[8]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[9]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[10]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + {tokens} = grammar.tokenizeLine('a{content: url (http://a.pl/)}') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[1]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[2]).toEqual value: 'content', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[3]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[5]).toEqual value: 'url (http://a.pl/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css'] + expect(tokens[6]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[7]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + {tokens} = grammar.tokenizeLine('a{ color: rgb (187,255,221); }') + expect(tokens[0]).toEqual value: 'a', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[1]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[3]).toEqual value: 'color', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[4]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[6]).toEqual value: 'rgb (', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css'] + expect(tokens[7]).toEqual value: '187', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(tokens[8]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'punctuation.separator.list.comma.css'] + expect(tokens[9]).toEqual value: '255', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(tokens[10]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'punctuation.separator.list.comma.css'] + expect(tokens[11]).toEqual value: '221', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(tokens[12]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[13]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[15]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 
'punctuation.section.property-list.end.bracket.curly.css'] + + describe 'Unicode ranges', -> + it 'tokenises single codepoints', -> + {tokens} = grammar.tokenizeLine('a{ a: U+A5 }') + expect(tokens[6]).toEqual value: 'U+A5', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.unicode-range.css'] + + it 'tokenises codepoint ranges', -> + {tokens} = grammar.tokenizeLine('a{ a: U+0025-00FF }') + expect(tokens[6]).toEqual value: 'U+0025', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.unicode-range.css'] + expect(tokens[7]).toEqual value: '-', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.unicode-range.css', 'punctuation.separator.dash.unicode-range.css'] + expect(tokens[8]).toEqual value: '00FF', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.unicode-range.css'] + + {tokens} = grammar.tokenizeLine('a{ unicode-range: u+0-7F }') + expect(tokens[6]).toEqual value: 'u+0', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.unicode-range.css'] + expect(tokens[7]).toEqual value: '-', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.unicode-range.css', 'punctuation.separator.dash.unicode-range.css'] + expect(tokens[8]).toEqual value: '7F', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.unicode-range.css'] + + it 'tokenises wildcard ranges', -> + {tokens} = grammar.tokenizeLine('a{ unicode-range: U+4?? }') + expect(tokens[6]).toEqual value: 'U+4??', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.unicode-range.css'] + + {tokens} = grammar.tokenizeLine('a{ unicode-range: U+0025-00FF, U+4?? 
}') + expect(tokens[6]).toEqual value: 'U+0025', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.unicode-range.css'] + expect(tokens[7]).toEqual value: '-', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.unicode-range.css', 'punctuation.separator.dash.unicode-range.css'] + expect(tokens[8]).toEqual value: '00FF', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.unicode-range.css'] + expect(tokens[9]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'punctuation.separator.list.comma.css'] + expect(tokens[11]).toEqual value: 'U+4??', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.other.unicode-range.css'] + + describe 'escape sequences', -> + it 'tokenizes escape sequences in single-quoted strings', -> + {tokens} = grammar.tokenizeLine "very-custom { content: '\\c0ffee' }" + + expect(tokens[0]).toEqual value: 'very-custom', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.custom.css'] + expect(tokens[1]).toEqual value: ' ', scopes: ['source.css'] + expect(tokens[2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[3]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[4]).toEqual value: 'content', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[5]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[6]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[7]).toEqual value: "'", scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css', 'punctuation.definition.string.begin.css'] + expect(tokens[8]).toEqual value: '\\c0ffee', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css', 'constant.character.escape.codepoint.css'] + + it 'tokenizes escape sequences in double-quoted strings', -> + {tokens} = grammar.tokenizeLine 'very-custom { content: "\\c0ffee" }' + + expect(tokens[0]).toEqual value: 'very-custom', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.custom.css'] + expect(tokens[1]).toEqual value: ' ', scopes: ['source.css'] + expect(tokens[2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[3]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[4]).toEqual value: 'content', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[5]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[6]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[7]).toEqual value: '"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[8]).toEqual value: '\\c0ffee', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'constant.character.escape.codepoint.css'] + + it 'tokenises escape sequences in selectors', -> + 
{tokens} = grammar.tokenizeLine('\\61 \\{ { } \\}') + expect(tokens[0]).toEqual value: '\\61', scopes: ['source.css', 'constant.character.escape.codepoint.css'] + expect(tokens[2]).toEqual value: '\\{', scopes: ['source.css', 'constant.character.escape.css'] + expect(tokens[4]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[6]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + expect(tokens[8]).toEqual value: '\\}', scopes: ['source.css', 'constant.character.escape.css'] + + {tokens} = grammar.tokenizeLine('\\61\\ \\. \\@media {}') # Matches <a><.><@media></@media></.></a> + expect(tokens[0]).toEqual value: '\\61', scopes: ['source.css', 'constant.character.escape.codepoint.css'] + expect(tokens[1]).toEqual value: '\\ ', scopes: ['source.css', 'constant.character.escape.css'] + expect(tokens[2]).toEqual value: '\\.', scopes: ['source.css', 'constant.character.escape.css'] + expect(tokens[4]).toEqual value: '\\@', scopes: ['source.css', 'constant.character.escape.css'] + expect(tokens[5]).toEqual value: 'media', scopes: ['source.css', 'meta.selector.css'] + expect(tokens[6]).toEqual value: ' ', scopes: ['source.css'] + expect(tokens[7]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + + it 'tokenises escape sequences in property lists', -> + {tokens} = grammar.tokenizeLine('a { \\77\\69\\64\\74\\68: 20px; }') # Same as writing "width: 20px" + expect(tokens[4]).toEqual value: '\\77', scopes: ['source.css', 'meta.property-list.css', 'constant.character.escape.codepoint.css'] + expect(tokens[5]).toEqual value: '\\69', scopes: ['source.css', 'meta.property-list.css', 'constant.character.escape.codepoint.css'] + expect(tokens[6]).toEqual value: '\\64', scopes: ['source.css', 'meta.property-list.css', 'constant.character.escape.codepoint.css'] + expect(tokens[7]).toEqual value: '\\74', scopes: ['source.css', 'meta.property-list.css', 'constant.character.escape.codepoint.css'] + expect(tokens[8]).toEqual value: '\\68', scopes: ['source.css', 'meta.property-list.css', 'constant.character.escape.codepoint.css'] + expect(tokens[9]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + + it 'tokenises escape sequences in property values', -> + {tokens} = grammar.tokenizeLine('a { content: \\1F764; }') + expect(tokens[7]).toEqual value: '\\1F764', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.character.escape.codepoint.css'] + expect(tokens[8]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[10]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + describe 'unclosed strings', -> + it 'highlights an unterminated string as an error', -> + {tokens} = grammar.tokenizeLine("a{ content: 'aaaa") + expect(tokens[4]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[6]).toEqual value: "'", scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css', 'punctuation.definition.string.begin.css'] + expect(tokens[7]).toEqual value: 'aaaa', scopes: ['source.css', 'meta.property-list.css', 
'meta.property-value.css', 'string.quoted.single.css', 'invalid.illegal.unclosed.string.css'] + + {tokens} = grammar.tokenizeLine('a{ content: "aaaa') + expect(tokens[4]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[6]).toEqual value: '"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[7]).toEqual value: 'aaaa', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'invalid.illegal.unclosed.string.css'] + + it "knows when a string is line-wrapped", -> + lines = grammar.tokenizeLines """ + a{ + content: "aaaaa\\\\\\ + aaa"; color: red; + } + """ + expect(lines[1][4]).toEqual value: '"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(lines[1][5]).toEqual value: 'aaaaa', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css'] + expect(lines[1][6]).toEqual value: '\\\\', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'constant.character.escape.css'] + expect(lines[1][7]).toEqual value: '\\', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'constant.character.escape.newline.css'] + expect(lines[2][0]).toEqual value: 'aaa', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css'] + expect(lines[2][1]).toEqual value: '"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(lines[2][2]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[2][4]).toEqual value: 'color', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + + lines = grammar.tokenizeLines """ + a{ + content: 'aaaaa\\\\\\ + aaa'; color: red; + } + """ + expect(lines[1][4]).toEqual value: "'", scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css', 'punctuation.definition.string.begin.css'] + expect(lines[1][5]).toEqual value: 'aaaaa', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css'] + expect(lines[1][6]).toEqual value: '\\\\', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css', 'constant.character.escape.css'] + expect(lines[1][7]).toEqual value: '\\', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css', 'constant.character.escape.newline.css'] + expect(lines[2][0]).toEqual value: 'aaa', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css'] + expect(lines[2][1]).toEqual value: "'", scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css', 'punctuation.definition.string.end.css'] + expect(lines[2][2]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[2][4]).toEqual value: 'color', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + + it 
'highlights escape sequences inside invalid strings', -> + {tokens} = grammar.tokenizeLine('a{ content: "aaa\\"aa') + expect(tokens[6]).toEqual value: '"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[7]).toEqual value: 'aaa', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'invalid.illegal.unclosed.string.css'] + expect(tokens[8]).toEqual value: '\\"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'invalid.illegal.unclosed.string.css', 'constant.character.escape.css'] + expect(tokens[9]).toEqual value: 'aa', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'invalid.illegal.unclosed.string.css'] + + {tokens} = grammar.tokenizeLine("a{ content: 'aaa\\'aa") + expect(tokens[6]).toEqual value: "'", scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css', 'punctuation.definition.string.begin.css'] + expect(tokens[7]).toEqual value: 'aaa', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css', 'invalid.illegal.unclosed.string.css'] + expect(tokens[8]).toEqual value: "\\'", scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css', 'invalid.illegal.unclosed.string.css', 'constant.character.escape.css'] + expect(tokens[9]).toEqual value: 'aa', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.single.css', 'invalid.illegal.unclosed.string.css'] + + it 'highlights unclosed lines in line-wrapped strings', -> + lines = grammar.tokenizeLines """ + a{ + content: "aaa\\"aa\\ + aaaa + aaaa; color: red; + } + """ + expect(lines[1][4]).toEqual value: '"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(lines[1][5]).toEqual value: 'aaa', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css'] + expect(lines[1][6]).toEqual value: '\\"', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'constant.character.escape.css'] + expect(lines[1][7]).toEqual value: 'aa', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css'] + expect(lines[1][8]).toEqual value: '\\', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'constant.character.escape.newline.css'] + expect(lines[2][0]).toEqual value: 'aaaa', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'string.quoted.double.css', 'invalid.illegal.unclosed.string.css'] + expect(lines[3][0]).toEqual value: 'aaaa', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css'] + expect(lines[3][1]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[3][3]).toEqual value: 'color', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[3][4]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(lines[3][6]).toEqual value: 'red', scopes: ['source.css', 'meta.property-list.css', 
'meta.property-value.css', 'support.constant.color.w3c-standard-color-name.css'] + expect(lines[3][7]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[4][0]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + describe 'comments', -> + it 'tokenises comments inside @import statements', -> + {tokens} = grammar.tokenizeLine('@import /* url("name"); */ "1.css";') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'import', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css'] + expect(tokens[3]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[4]).toEqual value: ' url("name"); ', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css'] + expect(tokens[5]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[7]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[8]).toEqual value: '1.css', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.double.css'] + expect(tokens[9]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[10]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.import.css', 'punctuation.terminator.rule.css'] + + {tokens} = grammar.tokenizeLine('@import/*";"*/ url("2.css");') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'import', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css'] + expect(tokens[2]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[3]).toEqual value: '";"', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css'] + expect(tokens[4]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[6]).toEqual value: 'url', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'support.function.url.css'] + expect(tokens[7]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[8]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[9]).toEqual value: '2.css', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.double.css'] + expect(tokens[10]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[11]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 
'punctuation.section.function.end.bracket.round.css'] + expect(tokens[12]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.import.css', 'punctuation.terminator.rule.css'] + + {tokens} = grammar.tokenizeLine('@import url("3.css") print /* url(";"); */;') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'import', scopes: ['source.css', 'meta.at-rule.import.css', 'keyword.control.at-rule.import.css'] + expect(tokens[3]).toEqual value: 'url', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'support.function.url.css'] + expect(tokens[4]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[5]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.double.css', 'punctuation.definition.string.begin.css'] + expect(tokens[6]).toEqual value: '3.css', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.double.css'] + expect(tokens[7]).toEqual value: '"', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'string.quoted.double.css', 'punctuation.definition.string.end.css'] + expect(tokens[8]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.import.css', 'meta.function.url.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[10]).toEqual value: 'print', scopes: ['source.css', 'meta.at-rule.import.css', 'support.constant.media.css'] + expect(tokens[12]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[13]).toEqual value: ' url(";"); ', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css'] + expect(tokens[14]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.import.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[15]).toEqual value: ';', scopes: ['source.css', 'meta.at-rule.import.css', 'punctuation.terminator.rule.css'] + + it 'tokenises comments inside @font-face statements', -> + {tokens} = grammar.tokenizeLine('@font-face/*"{;}"*/{}') + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.font-face.css', 'keyword.control.at-rule.font-face.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'font-face', scopes: ['source.css', 'meta.at-rule.font-face.css', 'keyword.control.at-rule.font-face.css'] + expect(tokens[2]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.font-face.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[3]).toEqual value: '"{;}"', scopes: ['source.css', 'meta.at-rule.font-face.css', 'comment.block.css'] + expect(tokens[4]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.font-face.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[5]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[6]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it 'tokenizes comments before media queries', -> + {tokens} = grammar.tokenizeLine '/* comment */ @media' + + expect(tokens[0]).toEqual 
value: '/*', scopes: ['source.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[1]).toEqual value: ' comment ', scopes: ['source.css', 'comment.block.css'] + expect(tokens[2]).toEqual value: '*/', scopes: ['source.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[4]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css'] + expect(tokens[5]).toEqual value: 'media', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + + it 'tokenizes comments after media queries', -> + {tokens} = grammar.tokenizeLine '@media/* comment */ ()' + + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'media', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + expect(tokens[2]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[3]).toEqual value: ' comment ', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css'] + expect(tokens[4]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + + it 'tokenizes comments inside query lists', -> + {tokens} = grammar.tokenizeLine '@media (max-height: 40em/* comment */)' + + expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css', 'punctuation.definition.keyword.css'] + expect(tokens[1]).toEqual value: 'media', scopes: ['source.css', 'meta.at-rule.media.header.css', 'keyword.control.at-rule.media.css'] + expect(tokens[3]).toEqual value: '(', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.begin.bracket.round.css'] + expect(tokens[4]).toEqual value: 'max-height', scopes: ['source.css', 'meta.at-rule.media.header.css', 'support.type.property-name.media.css'] + expect(tokens[5]).toEqual value: ':', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.separator.key-value.css'] + expect(tokens[7]).toEqual value: '40', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css'] + expect(tokens[8]).toEqual value: 'em', scopes: ['source.css', 'meta.at-rule.media.header.css', 'constant.numeric.css', 'keyword.other.unit.em.css'] + expect(tokens[9]).toEqual value: '/*', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[10]).toEqual value: ' comment ', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css'] + expect(tokens[11]).toEqual value: '*/', scopes: ['source.css', 'meta.at-rule.media.header.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[12]).toEqual value: ')', scopes: ['source.css', 'meta.at-rule.media.header.css', 'punctuation.definition.parameters.end.bracket.round.css'] + + it 'tokenizes inline comments', -> + {tokens} = grammar.tokenizeLine 'section {border:4px/*padding:1px*/}' + + expect(tokens[0]).toEqual value: 'section', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[1]).toEqual value: ' ', scopes: ['source.css'] + 
expect(tokens[2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[3]).toEqual value: 'border', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[4]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[5]).toEqual value: '4', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(tokens[6]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(tokens[7]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(tokens[8]).toEqual value: 'padding:1px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'comment.block.css'] + expect(tokens[9]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + expect(tokens[10]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it 'tokenizes multi-line comments', -> + lines = grammar.tokenizeLines """ + section { + border:4px /*1px; + padding:1px*/ + } + """ + + expect(lines[1][5]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css'] + expect(lines[1][6]).toEqual value: '/*', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'comment.block.css', 'punctuation.definition.comment.begin.css'] + expect(lines[1][7]).toEqual value: '1px;', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'comment.block.css'] + + expect(lines[2][0]).toEqual value: ' padding:1px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'comment.block.css'] + expect(lines[2][1]).toEqual value: '*/', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'comment.block.css', 'punctuation.definition.comment.end.css'] + + expect(lines[3][0]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + describe 'Animations', -> + it 'does not confuse animation names with predefined keywords', -> + tokens = grammar.tokenizeLines ''' + .animated { + animation-name: orphan-black; + animation-name: line-scale; + } + ''' + expect(tokens[1][4]).toEqual value: 'orphan-black', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css'] + expect(tokens[2][4]).toEqual value: 'line-scale', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css'] + + describe 'Transforms', -> + it 'tokenizes transform functions', -> + tokens = grammar.tokenizeLines ''' + .transformed { + transform: matrix(0, 1.5, -1.5, 0, 0, 100px); + transform: rotate(90deg) translateX(100px) scale(1.5); + } + ''' + expect(tokens[1][1]).toEqual value: 'transform', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[1][2]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[1][4]).toEqual value: 'matrix', scopes: ['source.css', 
'meta.property-list.css', 'meta.property-value.css', 'support.function.transform.css'] + expect(tokens[1][5]).toEqual value: '(', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'punctuation.section.function.begin.bracket.round.css'] + expect(tokens[1][6]).toEqual value: '0', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(tokens[1][7]).toEqual value: ',', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'punctuation.separator.list.comma.css'] + expect(tokens[1][12]).toEqual value: '-1.5', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(tokens[1][22]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css'] + expect(tokens[1][23]).toEqual value: ')', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'punctuation.section.function.end.bracket.round.css'] + expect(tokens[2][4]).toEqual value: 'rotate', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.function.transform.css'] + expect(tokens[2][10]).toEqual value: 'translateX', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.function.transform.css'] + expect(tokens[2][16]).toEqual value: 'scale', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.function.transform.css'] + + describe "performance regressions", -> + it "does not hang when tokenizing invalid input preceding an equals sign", -> + grammar = atom.grammars.grammarForScopeName('source.css') + start = Date.now() + grammar.tokenizeLine('<![CDATA[啊啊啊啊啊啊啊啊啊啊啊啊啊啊啊啊啊啊啊啊啊啊啊啊啊啊啊啊"=') + expect(Date.now() - start).toBeLessThan(5000) + + it "does not hang when tokenizing accidental HTML tags", -> + start = Date.now() + grammar.tokenizeLines """ + <body> + [}~#{'ÁÂÃÄÅÆÇÈÊËÍÎ'.repeat(100)} + </body> + """ + expect(Date.now() - start).toBeLessThan(5000) + + describe "firstLineMatch", -> + it "recognises Emacs modelines", -> + valid = """ + #-*- CSS -*- + #-*- mode: CSS -*- + /* -*-css-*- */ + // -*- CSS -*- + /* -*- mode:CSS -*- */ + // -*- font:bar;mode:CSS -*- + // -*- font:bar;mode:CSS;foo:bar; -*- + // -*-font:mode;mode:CSS-*- + // -*- foo:bar mode: css bar:baz -*- + " -*-foo:bar;mode:css;bar:foo-*- "; + " -*-font-mode:foo;mode:css;foo-bar:quux-*-" + "-*-font:x;foo:bar; mode : CsS; bar:foo;foooooo:baaaaar;fo:ba;-*-"; + "-*- font:x;foo : bar ; mode : cSS ; bar : foo ; foooooo:baaaaar;fo:ba-*-"; + """ + for line in valid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull() + + invalid = """ + /* --*css-*- */ + /* -*-- CSS -*- + /* -*- -- CSS -*- + /* -*- CSS -;- -*- + // -*- CCSS -*- + // -*- CSS; -*- + // -*- css-stuff -*- + /* -*- model:css -*- + /* -*- indent-mode:css -*- + // -*- font:mode;CSS -*- + // -*- mode: -*- CSS + // -*- mode: I-miss-plain-old-css -*- + // -*-font:mode;mode:css--*- + """ + for line in invalid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull() + + it "recognises Vim modelines", -> + valid = """ + vim: se filetype=css: + # vim: se ft=css: + # vim: set ft=CSS: + # vim: set filetype=CSS: + # vim: ft=CSS + # vim: syntax=CSS + # vim: se syntax=css: + # ex: syntax=CSS + # vim:ft=css + # vim600: ft=css + # vim>600: set ft=css: + # vi:noai:sw=3 ts=6 ft=CSS + # vi::::::::::noai:::::::::::: ft=CSS + # 
vim:ts=4:sts=4:sw=4:noexpandtab:ft=cSS + # vi:: noai : : : : sw =3 ts =6 ft =Css + # vim: ts=4: pi sts=4: ft=CSS: noexpandtab: sw=4: + # vim: ts=4 sts=4: ft=css noexpandtab: + # vim:noexpandtab sts=4 ft=css ts=4 + # vim:noexpandtab:ft=css + # vim:ts=4:sts=4 ft=css:noexpandtab:\x20 + # vim:noexpandtab titlestring=hi\|there\\\\ ft=css ts=4 + """ + for line in valid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull() + + invalid = """ + ex: se filetype=css: + _vi: se filetype=CSS: + vi: se filetype=CSS + # vim set ft=css3 + # vim: soft=css + # vim: clean-syntax=css: + # vim set ft=css: + # vim: setft=CSS: + # vim: se ft=css backupdir=tmp + # vim: set ft=css set cmdheight=1 + # vim:noexpandtab sts:4 ft:CSS ts:4 + # vim:noexpandtab titlestring=hi\\|there\\ ft=CSS ts=4 + # vim:noexpandtab titlestring=hi\\|there\\\\\\ ft=CSS ts=4 + """ + for line in invalid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull() + + describe "Missing supported properties regressions", -> + it "recognises place-items property as supported", -> + tokens = grammar.tokenizeLines 'a { place-items: center center; }' + expect(tokens[0][0]).toEqual value: 'a', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[0][1]).toEqual value: ' ', scopes: ['source.css'] + expect(tokens[0][2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[0][3]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[0][4]).toEqual value: 'place-items', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[0][5]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[0][6]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[0][7]).toEqual value: 'center', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[0][8]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css'] + expect(tokens[0][9]).toEqual value: 'center', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[0][10]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[0][11]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[0][12]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it "recognises place-self property as supported", -> + tokens = grammar.tokenizeLines 'a { place-self: center center; }' + expect(tokens[0][0]).toEqual value: 'a', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[0][1]).toEqual value: ' ', scopes: ['source.css'] + expect(tokens[0][2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[0][3]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[0][4]).toEqual value: 'place-self', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[0][5]).toEqual value: ':', 
scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[0][6]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[0][7]).toEqual value: 'center', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[0][8]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css'] + expect(tokens[0][9]).toEqual value: 'center', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[0][10]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[0][11]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[0][12]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it "recognises place-content property as supported", -> + tokens = grammar.tokenizeLines 'a { place-content: center center; }' + expect(tokens[0][0]).toEqual value: 'a', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[0][1]).toEqual value: ' ', scopes: ['source.css'] + expect(tokens[0][2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[0][3]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[0][4]).toEqual value: 'place-content', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[0][5]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[0][6]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[0][7]).toEqual value: 'center', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[0][8]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css'] + expect(tokens[0][9]).toEqual value: 'center', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[0][10]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[0][11]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[0][12]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css'] + + it "recognises row-gap property as supported", -> + tokens = grammar.tokenizeLines 'a { row-gap: 5px; }' + expect(tokens[0][0]).toEqual value: 'a', scopes: ['source.css', 'meta.selector.css', 'entity.name.tag.css'] + expect(tokens[0][1]).toEqual value: ' ', scopes: ['source.css'] + expect(tokens[0][2]).toEqual value: '{', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.begin.bracket.curly.css'] + expect(tokens[0][3]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css'] + expect(tokens[0][4]).toEqual value: 'row-gap', scopes: ['source.css', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[0][5]).toEqual value: ':', scopes: ['source.css', 'meta.property-list.css', 
'punctuation.separator.key-value.css']
+      expect(tokens[0][6]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css']
+      expect(tokens[0][7]).toEqual value: '5', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css']
+      expect(tokens[0][8]).toEqual value: 'px', scopes: ['source.css', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css', 'keyword.other.unit.px.css']
+      expect(tokens[0][9]).toEqual value: ';', scopes: ['source.css', 'meta.property-list.css', 'punctuation.terminator.rule.css']
+      expect(tokens[0][10]).toEqual value: ' ', scopes: ['source.css', 'meta.property-list.css']
+      expect(tokens[0][11]).toEqual value: '}', scopes: ['source.css', 'meta.property-list.css', 'punctuation.section.property-list.end.bracket.curly.css']
diff --git a/packages/language-gfm/.github/no-response.yml b/packages/language-gfm/.github/no-response.yml
new file mode 100644
index 000000000..1c8799d13
--- /dev/null
+++ b/packages/language-gfm/.github/no-response.yml
@@ -0,0 +1,15 @@
+# Configuration for probot-no-response - https://github.com/probot/no-response
+
+# Number of days of inactivity before an issue is closed for lack of response
+daysUntilClose: 28
+
+# Label requiring a response
+responseRequiredLabel: more-information-needed
+
+# Comment to post when closing an issue for lack of response. Set to `false` to disable.
+closeComment: >
+  This issue has been automatically closed because there has been no response
+  to our request for more information from the original author. With only the
+  information that is currently in the issue, we don't have enough information
+  to take action. Please reach out if you have or find the answers we need so
+  that we can investigate further.
diff --git a/packages/language-gfm/.github/workflows/ci.yml b/packages/language-gfm/.github/workflows/ci.yml
new file mode 100644
index 000000000..ab77c1f1f
--- /dev/null
+++ b/packages/language-gfm/.github/workflows/ci.yml
@@ -0,0 +1,23 @@
+name: CI
+
+on: [push]
+
+env:
+  CI: true
+
+jobs:
+  Test:
+    strategy:
+      matrix:
+        os: [ubuntu-latest, macos-latest, windows-latest]
+        channel: [stable, beta]
+    runs-on: ${{ matrix.os }}
+    steps:
+      - uses: actions/checkout@v1
+      - uses: UziTech/action-setup-atom@v2
+        with:
+          version: ${{ matrix.channel }}
+      - name: Install dependencies
+        run: apm install
+      - name: Run tests
+        run: atom --test spec
diff --git a/packages/language-gfm/.gitignore b/packages/language-gfm/.gitignore
new file mode 100644
index 000000000..3c3629e64
--- /dev/null
+++ b/packages/language-gfm/.gitignore
@@ -0,0 +1 @@
+node_modules
diff --git a/packages/language-gfm/CONTRIBUTING.md b/packages/language-gfm/CONTRIBUTING.md
new file mode 100644
index 000000000..0fd0ad696
--- /dev/null
+++ b/packages/language-gfm/CONTRIBUTING.md
@@ -0,0 +1 @@
+See the [Atom contributing guide](https://github.com/atom/atom/blob/master/CONTRIBUTING.md)
diff --git a/packages/language-gfm/ISSUE_TEMPLATE.md b/packages/language-gfm/ISSUE_TEMPLATE.md
new file mode 100644
index 000000000..b60bb86c9
--- /dev/null
+++ b/packages/language-gfm/ISSUE_TEMPLATE.md
@@ -0,0 +1,40 @@
+<!--
+
+Have you read Atom's Code of Conduct? By filing an Issue, you are expected to comply with it, including treating everyone with respect: https://github.com/atom/atom/blob/master/CODE_OF_CONDUCT.md
+
+Do you want to ask a question? Are you looking for support? The Atom message board is the best place for getting support: https://discuss.atom.io
+
+-->
+
+### Prerequisites
+
+* [ ] Put an X between the brackets on this line if you have done all of the following:
+  * Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode
+  * Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/
+  * Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq
+  * Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom
+  * Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages
+
+### Description
+
+[Description of the issue]
+
+### Steps to Reproduce
+
+1. [First Step]
+2. [Second Step]
+3. [and so on...]
+
+**Expected behavior:** [What you expect to happen]
+
+**Actual behavior:** [What actually happens]
+
+**Reproduces how often:** [What percentage of the time does it reproduce?]
+
+### Versions
+
+You can get this information from copy and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running.
+
+### Additional Information
+
+Any additional information, configuration or data that might be necessary to reproduce the issue.
diff --git a/packages/language-gfm/LICENSE.md b/packages/language-gfm/LICENSE.md
new file mode 100644
index 000000000..4d231b456
--- /dev/null
+++ b/packages/language-gfm/LICENSE.md
@@ -0,0 +1,20 @@
+Copyright (c) 2014 GitHub Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/packages/language-gfm/PULL_REQUEST_TEMPLATE.md b/packages/language-gfm/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 000000000..cdaa94a86
--- /dev/null
+++ b/packages/language-gfm/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,28 @@
+### Requirements
+
+* Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion.
+* All new code requires tests to ensure against regressions
+
+### Description of the Change
+
+<!--
+
+We must be able to understand the design of your change from this description. If we can't get a good idea of what the code will be doing from the description here, the pull request may be closed at the maintainers' discretion. Keep in mind that the maintainer reviewing this PR may not be familiar with or have worked with the code here recently, so please walk us through the concepts.
+
+-->
+
+### Alternate Designs
+
+<!-- Explain what other alternates were considered and why the proposed version was selected -->
+
+### Benefits
+
+<!-- What benefits will be realized by the code change? -->
+
+### Possible Drawbacks
+
+<!-- What are the possible side-effects or negative impacts of the code change? -->
+
+### Applicable Issues
+
+<!-- Enter any applicable Issues here -->
diff --git a/packages/language-gfm/README.md b/packages/language-gfm/README.md
new file mode 100644
index 000000000..20ee3f1c9
--- /dev/null
+++ b/packages/language-gfm/README.md
@@ -0,0 +1,6 @@
+# GitHub flavored Markdown package
+[![OS X Build Status](https://travis-ci.org/atom/language-gfm.svg?branch=master)](https://travis-ci.org/atom/language-gfm) [![Windows Build Status](https://ci.appveyor.com/api/projects/status/rpub8qjyd8lt7wai/branch/master?svg=true)](https://ci.appveyor.com/project/Atom/language-gfm/branch/master) [![Dependency Status](https://david-dm.org/atom/language-gfm.svg)](https://david-dm.org/atom/language-gfm)
+
+Adds syntax highlighting and snippets to [GitHub flavored Markdown](https://help.github.com/articles/github-flavored-markdown) files in Atom.
+
+Contributions are greatly appreciated. Please fork this repository and open a pull request to add snippets, make grammar tweaks, etc.
diff --git a/packages/language-gfm/coffeelint.json b/packages/language-gfm/coffeelint.json
new file mode 100644
index 000000000..a5dd715e3
--- /dev/null
+++ b/packages/language-gfm/coffeelint.json
@@ -0,0 +1,37 @@
+{
+  "max_line_length": {
+    "level": "ignore"
+  },
+  "no_empty_param_list": {
+    "level": "error"
+  },
+  "arrow_spacing": {
+    "level": "error"
+  },
+  "no_interpolation_in_single_quotes": {
+    "level": "error"
+  },
+  "no_debugger": {
+    "level": "error"
+  },
+  "prefer_english_operator": {
+    "level": "error"
+  },
+  "colon_assignment_spacing": {
+    "spacing": {
+      "left": 0,
+      "right": 1
+    },
+    "level": "error"
+  },
+  "braces_spacing": {
+    "spaces": 0,
+    "level": "error"
+  },
+  "spacing_after_comma": {
+    "level": "error"
+  },
+  "no_stand_alone_at": {
+    "level": "error"
+  }
+}
diff --git a/packages/language-gfm/grammars/gfm.json b/packages/language-gfm/grammars/gfm.json
new file mode 100644
index 000000000..3eda2fe12
--- /dev/null
+++ b/packages/language-gfm/grammars/gfm.json
@@ -0,0 +1,1926 @@
+{
+  "name": "GitHub Markdown",
+  "scopeName": "source.gfm",
+  "limitLineLength": false,
+  "fileTypes": [
+    "markdown",
+    "md",
+    "mdown",
+    "mdwn",
+    "mkd",
+    "mkdn",
+    "mkdown",
+    "rmd",
+    "ron",
+    "workbook"
+  ],
+  "patterns": [
+    {
+      "include": "#blocks"
+    },
+    {
+      "include": "#inlines"
+    },
+    {
+      "include": "#flavors"
+    }
+  ],
+  "repository": {
+    "blocks": {
+      "patterns": [
+        {
+          "include": "#headings"
+        },
+        {
+          "include": "#fenced-code-blocks"
+        },
+        {
+          "include": "#fenced-code"
+        },
+        {
+          "include": "#comments"
+        },
+        {
+          "include": "#front-matter"
+        },
+        {
+          "include": "#hr"
+        },
+        {
+          "include": "#lists"
+        },
+        {
+          "include": "#quotes"
+        },
+        {
+          "include": "#github-blocks"
+        }
+      ]
+    },
+    "inlines": {
+      "patterns": [
+        {
+          "include": "#escapes"
+        },
+        {
+          "include": "#code"
+        },
+        {
+          "include": "#links"
+        },
+        {
+          "include": "#emphasis"
+        },
+        {
+          "include": "#line-breaks"
+        },
+        {
+          "include": "#entities"
+        },
+        {
+          "include": "#github-inlines"
+        }
+      ]
+    },
+    "flavors": {
+      "patterns": [
+        {
+          "include": "#criticmark"
+        },
+        {
+          "include":
"#github-inlines" + } + ] + }, + "inlines-in-blocks": { + "patterns": [ + { + "include": "#escapes" + }, + { + "include": "#code" + }, + { + "include": "#entities" + }, + { + "include": "#links" + }, + { + "include": "#emphasis" + }, + { + "include": "#flavors" + } + ] + }, + "inlines-in-inlines": { + "patterns": [ + { + "include": "#escapes" + }, + { + "include": "#code" + }, + { + "include": "#entities" + }, + { + "include": "#links" + }, + { + "include": "#emphasis" + }, + { + "include": "#github-inlines" + }, + { + "include": "#criticmark" + } + ] + }, + "headings": { + "patterns": [ + { + "begin": "^(#{6})(\\s*)", + "end": "$", + "name": "markup.heading.heading-6.gfm", + "captures": { + "1": { + "name": "markup.heading.marker.gfm" + }, + "2": { + "name": "markup.heading.space.gfm" + } + }, + "patterns": [ + { + "include": "$self" + } + ] + }, + { + "begin": "^(#{5})(\\s*)", + "end": "$", + "name": "markup.heading.heading-5.gfm", + "captures": { + "1": { + "name": "markup.heading.marker.gfm" + }, + "2": { + "name": "markup.heading.space.gfm" + } + }, + "patterns": [ + { + "include": "$self" + } + ] + }, + { + "begin": "^(#{4})(\\s*)", + "end": "$", + "name": "markup.heading.heading-4.gfm", + "captures": { + "1": { + "name": "markup.heading.marker.gfm" + }, + "2": { + "name": "markup.heading.space.gfm" + } + }, + "patterns": [ + { + "include": "$self" + } + ] + }, + { + "begin": "^(#{3})(\\s*)", + "end": "$", + "name": "markup.heading.heading-3.gfm", + "captures": { + "1": { + "name": "markup.heading.marker.gfm" + }, + "2": { + "name": "markup.heading.space.gfm" + } + }, + "patterns": [ + { + "include": "$self" + } + ] + }, + { + "begin": "^(#{2})(\\s*)", + "end": "$", + "name": "markup.heading.heading-2.gfm", + "captures": { + "1": { + "name": "markup.heading.marker.gfm" + }, + "2": { + "name": "markup.heading.space.gfm" + } + }, + "patterns": [ + { + "include": "$self" + } + ] + }, + { + "begin": "^(#{1})(\\s*)", + "end": "$", + "name": "markup.heading.heading-1.gfm", + "captures": { + "1": { + "name": "markup.heading.marker.gfm" + }, + "2": { + "name": "markup.heading.space.gfm" + } + }, + "patterns": [ + { + "include": "$self" + } + ] + } + ] + }, + "comments": { + "patterns": [ + { + "begin": "<!--", + "captures": { + "0": { + "name": "punctuation.definition.comment.gfm" + } + }, + "end": "--\\s*>", + "name": "comment.block.gfm" + } + ] + }, + "fenced-code-blocks": { + "patterns": [ + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(apib|apiblueprint))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.gfm", + "contentName": "text.embedded.html.markdown.source.gfm.apib", + "patterns": [ + { + "include": "text.html.markdown.source.gfm.apib" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(mson))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.gfm", + "contentName": "text.embedded.html.markdown.source.gfm.mson", + "patterns": [ + { + "include": "text.html.markdown.source.gfm.mson" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(sql))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.sql.gfm", + "contentName": "source.embedded.sql", + 
"patterns": [ + { + "include": "source.sql" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(graphql))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.graphql.gfm", + "contentName": "source.embedded.graphql", + "patterns": [ + { + "include": "source.graphql" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(clj|clojure))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.clojure.gfm", + "contentName": "source.embedded.clojure", + "patterns": [ + { + "include": "source.clojure" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(coffee-?(script)?|cson))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.coffee.gfm", + "contentName": "source.embedded.coffee", + "patterns": [ + { + "include": "source.coffee" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(javascript|js))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.js.gfm", + "contentName": "source.embedded.js", + "patterns": [ + { + "include": "source.js" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(typescript|ts))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.ts.gfm", + "contentName": "source.embedded.ts", + "patterns": [ + { + "include": "source.ts" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(markdown|md|mdo?wn|mkdn?|mkdown))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.gfm", + "contentName": "text.embedded.md", + "patterns": [ + { + "include": "$self" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(json))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.json.gfm", + "contentName": "source.embedded.json", + "patterns": [ + { + "include": "source.json" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(css))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.css.gfm", + "contentName": "source.embedded.css", + "patterns": [ + { + "include": "source.css" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(less))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.less.gfm", + "contentName": "source.embedded.css.less", + "patterns": [ + { + "include": "source.css.less" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(xml))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", 
+ "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.xml.gfm", + "contentName": "text.embedded.xml", + "patterns": [ + { + "include": "text.xml" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(ruby|rb))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.ruby.gfm", + "contentName": "source.embedded.ruby", + "patterns": [ + { + "include": "source.ruby" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(rust|rs))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.rust.gfm", + "contentName": "source.embedded.rust", + "patterns": [ + { + "include": "source.rust" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(java))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.java.gfm", + "contentName": "source.embedded.java", + "patterns": [ + { + "include": "source.java" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(kotlin))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.kotlin.gfm", + "contentName": "source.embedded.kotlin", + "patterns": [ + { + "include": "source.kotlin" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(scala|sbt))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.scala.gfm", + "contentName": "source.embedded.scala", + "patterns": [ + { + "include": "source.scala" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(erlang))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.erlang.gfm", + "contentName": "source.embedded.erlang", + "patterns": [ + { + "include": "source.erlang" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(go(lang)?))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.go.gfm", + "contentName": "source.embedded.go", + "patterns": [ + { + "include": "source.go" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(cs(harp)?))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.cs.gfm", + "contentName": "source.embedded.cs", + "patterns": [ + { + "include": "source.cs" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(php))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.php.gfm", + "contentName": "source.embedded.php", + "patterns": [ + { + "include": "source.php" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(sh|bash|shell))\\s*$", + 
"beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.shell.gfm", + "contentName": "source.embedded.shell", + "patterns": [ + { + "include": "source.shell" + } + ] + }, + { + "begin": "^\\s*([`~]{3,})\\s*(?i:(properties))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.git-config.gfm", + "contentName": "source.embedded.git-config", + "patterns": [ + { + "include": "source.git-config" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(shellsession|console))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.shell-session.gfm", + "contentName": "text.embedded.shell-session", + "patterns": [ + { + "include": "text.shell-session" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(py(thon)?))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.python.gfm", + "contentName": "source.embedded.python", + "patterns": [ + { + "include": "source.python" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(pycon))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.python.console.gfm", + "contentName": "source.embedded.python.console", + "patterns": [ + { + "include": "text.python.console" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(c))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.c.gfm", + "contentName": "source.embedded.c", + "patterns": [ + { + "include": "source.c" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(c(pp|\\+\\+)))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.cpp.gfm", + "contentName": "source.embedded.cpp", + "patterns": [ + { + "include": "source.cpp" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(objc|objective-c))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.objc.gfm", + "contentName": "source.embedded.objc", + "patterns": [ + { + "include": "source.objc" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(adoc|asciidoc|asciidoctor|asc))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.asciidoc.gfm", + "contentName": "source.embedded.asciidoc", + "patterns": [ + { + "include": "source.asciidoc" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(swift))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + 
"name": "markup.code.swift.gfm", + "contentName": "source.embedded.swift", + "patterns": [ + { + "include": "source.swift" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(dockerfile|docker))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.dockerfile.gfm", + "contentName": "source.embedded.dockerfile", + "patterns": [ + { + "include": "source.dockerfile" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(makefile|make))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.makefile.gfm", + "contentName": "source.embedded.makefile", + "patterns": [ + { + "include": "source.makefile" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(perl))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.perl.gfm", + "contentName": "source.embedded.perl", + "patterns": [ + { + "include": "source.perl" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(perl6))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.perl6.gfm", + "contentName": "source.embedded.perl6", + "patterns": [ + { + "include": "source.perl6" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(toml))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.toml.gfm", + "contentName": "source.embedded.toml", + "patterns": [ + { + "include": "source.toml" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(html))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.html.gfm", + "contentName": "text.embedded.html.basic", + "patterns": [ + { + "include": "text.html.basic" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(ya?ml))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.yaml.gfm", + "contentName": "source.embedded.yaml", + "patterns": [ + { + "include": "source.yaml" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(elixir))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.elixir.gfm", + "contentName": "source.embedded.elixir", + "patterns": [ + { + "include": "source.elixir" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(diff|patch|rej))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.diff.gfm", + "contentName": "source.embedded.diff", + "patterns": [ + { + "include": "source.diff" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(julia|jl))\\s*$", + 
"beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.julia.gfm", + "contentName": "source.embedded.julia", + "patterns": [ + { + "include": "source.julia" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*([\\{]{0,1})(?i:(r))([^\\}]*)([\\}]{0,1})\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.r.gfm", + "contentName": "source.embedded.r", + "patterns": [ + { + "include": "source.r" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(haskell))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.haskell.gfm", + "contentName": "source.embedded.haskell", + "patterns": [ + { + "include": "source.haskell" + } + ] + }, + { + "begin": "^\\s*(`{3,}|~{3,})\\s*(?i:(elm))\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.elm.gfm", + "contentName": "source.embedded.elm", + "patterns": [ + { + "include": "source.elm" + } + ] + } + ] + }, + "fenced-code": { + "patterns": [ + { + "begin": "^\\s*(`{3,}|~{3,})\\s*([-\\w]+)\\s*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.code.other.gfm", + "contentName": "source.embedded.${2:/downcase}" + }, + { + "begin": "^\\s*(`{3,}|~{3,}).*$", + "beginCaptures": { + "0": { + "name": "support.gfm" + } + }, + "end": "^\\s*\\1((?<=`)`+|(?<=~)~+)?\\s*$", + "endCaptures": { + "0": { + "name": "support.gfm" + } + }, + "name": "markup.raw.gfm" + } + ] + }, + "front-matter": { + "patterns": [ + { + "begin": "\\A---$", + "end": "^(---|\\.\\.\\.)$", + "captures": { + "0": { + "name": "comment.hr.gfm" + } + }, + "name": "front-matter.yaml.gfm", + "patterns": [ + { + "include": "source.yaml" + } + ] + } + ] + }, + "hr": { + "patterns": [ + { + "match": "^\\s*[*]{3,}\\s*$", + "name": "comment.hr.gfm" + }, + { + "match": "^\\s*[-]{3,}\\s*$", + "name": "comment.hr.gfm" + }, + { + "match": "^\\s*[_]{3,}\\s*$", + "name": "comment.hr.gfm" + } + ] + }, + "lists": { + "patterns": [ + { + "match": "^\\s*([*+-])[ \\t]+", + "captures": { + "1": { + "name": "variable.unordered.list.gfm" + } + } + }, + { + "match": "^\\s*(\\d+\\.)[ \\t]+", + "captures": { + "1": { + "name": "variable.ordered.list.gfm" + } + } + } + ] + }, + "quotes": { + "patterns": [ + { + "begin": "^\\s*(>)", + "end": "^\\s*?$", + "beginCaptures": { + "1": { + "name": "support.quote.gfm" + } + }, + "name": "comment.quote.gfm", + "patterns": [ + { + "include": "#blocks" + } + ] + } + ] + }, + "github-blocks": { + "patterns": [ + { + "begin": "^\\|", + "end": "(\\|)?\\s*$", + "beginCaptures": { + "0": { + "name": "border.pipe.outer" + } + }, + "endCaptures": { + "1": { + "name": "border.pipe.outer" + } + }, + "name": "table.gfm", + "patterns": [ + { + "match": "(:?)(-+)(:?)", + "captures": { + "1": { + "name": "border.alignment" + }, + "2": { + "name": "border.header" + }, + "3": { + "name": "border.alignment" + } + } + }, + { + "match": "\\|", + "name": "border.pipe.inner" + } + ] + } + ] + }, + 
"github-inlines": { + "patterns": [ + { + "match": "(:)(\\+1|\\-1|100|1234|8ball|a|ab|abc|abcd|accept|aerial_tramway|airplane|alarm_clock|alien|ambulance|anchor|angel|anger|angry|anguished|ant|apple|aquarius|aries|arrow_backward|arrow_double_down|arrow_double_up|arrow_down|arrow_down_small|arrow_forward|arrow_heading_down|arrow_heading_up|arrow_left|arrow_lower_left|arrow_lower_right|arrow_right|arrow_right_hook|arrow_up|arrow_up_down|arrow_up_small|arrow_upper_left|arrow_upper_right|arrows_clockwise|arrows_counterclockwise|art|articulated_lorry|astonished|atm|b|baby|baby_bottle|baby_chick|baby_symbol|back|baggage_claim|balloon|ballot_box_with_check|bamboo|banana|bangbang|bank|bar_chart|barber|baseball|basketball|bath|bathtub|battery|bear|bee|beer|beers|beetle|beginner|bell|bento|bicyclist|bike|bikini|bird|birthday|black_circle|black_joker|black_medium_small_square|black_medium_square|black_nib|black_small_square|black_square|black_square_button|blossom|blowfish|blue_book|blue_car|blue_heart|blush|boar|boat|bomb|book|bookmark|bookmark_tabs|books|boom|boot|bouquet|bow|bowling|bowtie|boy|bread|bride_with_veil|bridge_at_night|briefcase|broken_heart|bug|bulb|bullettrain_front|bullettrain_side|bus|busstop|bust_in_silhouette|busts_in_silhouette|cactus|cake|calendar|calling|camel|camera|cancer|candy|capital_abcd|capricorn|car|card_index|carousel_horse|cat|cat2|cd|chart|chart_with_downwards_trend|chart_with_upwards_trend|checkered_flag|cherries|cherry_blossom|chestnut|chicken|children_crossing|chocolate_bar|christmas_tree|church|cinema|circus_tent|city_sunrise|city_sunset|cl|clap|clapper|clipboard|clock1|clock10|clock1030|clock11|clock1130|clock12|clock1230|clock130|clock2|clock230|clock3|clock330|clock4|clock430|clock5|clock530|clock6|clock630|clock7|clock730|clock8|clock830|clock9|clock930|closed_book|closed_lock_with_key|closed_umbrella|cloud|clubs|cn|cocktail|coffee|cold_sweat|collision|computer|confetti_ball|confounded|confused|congratulations|construction|construction_worker|convenience_store|cookie|cool|cop|copyright|corn|couple|couple_with_heart|couplekiss|cow|cow2|credit_card|crocodile|crossed_flags|crown|cry|crying_cat_face|crystal_ball|cupid|curly_loop|currency_exchange|curry|custard|customs|cyclone|dancer|dancers|dango|dart|dash|date|de|deciduous_tree|department_store|diamond_shape_with_a_dot_inside|diamonds|disappointed|disappointed_relieved|dizzy|dizzy_face|do_not_litter|dog|dog2|dollar|dolls|dolphin|donut|door|doughnut|dragon|dragon_face|dress|dromedary_camel|droplet|dvd|e\\-mail|ear|ear_of_rice|earth_africa|earth_americas|earth_asia|egg|eggplant|eight|eight_pointed_black_star|eight_spoked_asterisk|electric_plug|elephant|email|end|envelope|es|euro|european_castle|european_post_office|evergreen_tree|exclamation|expressionless|eyeglasses|eyes|facepunch|factory|fallen_leaf|family|fast_forward|fax|fearful|feelsgood|feet|ferris_wheel|file_folder|finnadie|fire|fire_engine|fireworks|first_quarter_moon|first_quarter_moon_with_face|fish|fish_cake|fishing_pole_and_fish|fist|five|flags|flashlight|floppy_disk|flower_playing_cards|flushed|foggy|football|fork_and_knife|fountain|four|four_leaf_clover|fr|free|fried_shrimp|fries|frog|frowning|fu|fuelpump|full_moon|full_moon_with_face|game_die|gb|gem|gemini|ghost|gift|gift_heart|girl|globe_with_meridians|goat|goberserk|godmode|golf|grapes|green_apple|green_book|green_heart|grey_exclamation|grey_question|grimacing|grin|grinning|guardsman|guitar|gun|haircut|hamburger|hammer|hamster|hand|handbag|hankey|hash|hatched_chick|hatching_chick|headphones|hear_no
_evil|heart|heart_decoration|heart_eyes|heart_eyes_cat|heartbeat|heartpulse|hearts|heavy_check_mark|heavy_division_sign|heavy_dollar_sign|heavy_exclamation_mark|heavy_minus_sign|heavy_multiplication_x|heavy_plus_sign|helicopter|herb|hibiscus|high_brightness|high_heel|hocho|honey_pot|honeybee|horse|horse_racing|hospital|hotel|hotsprings|hourglass|hourglass_flowing_sand|house|house_with_garden|hurtrealbad|hushed|ice_cream|icecream|id|ideograph_advantage|imp|inbox_tray|incoming_envelope|information_desk_person|information_source|innocent|interrobang|iphone|it|izakaya_lantern|jack_o_lantern|japan|japanese_castle|japanese_goblin|japanese_ogre|jeans|joy|joy_cat|jp|key|keycap_ten|kimono|kiss|kissing|kissing_cat|kissing_closed_eyes|kissing_face|kissing_heart|kissing_smiling_eyes|koala|koko|kr|large_blue_circle|large_blue_diamond|large_orange_diamond|last_quarter_moon|last_quarter_moon_with_face|laughing|leaves|ledger|left_luggage|left_right_arrow|leftwards_arrow_with_hook|lemon|leo|leopard|libra|light_rail|link|lips|lipstick|lock|lock_with_ink_pen|lollipop|loop|loudspeaker|love_hotel|love_letter|low_brightness|m|mag|mag_right|mahjong|mailbox|mailbox_closed|mailbox_with_mail|mailbox_with_no_mail|man|man_with_gua_pi_mao|man_with_turban|mans_shoe|maple_leaf|mask|massage|meat_on_bone|mega|melon|memo|mens|metal|metro|microphone|microscope|milky_way|minibus|minidisc|mobile_phone_off|money_with_wings|moneybag|monkey|monkey_face|monorail|moon|mortar_board|mount_fuji|mountain_bicyclist|mountain_cableway|mountain_railway|mouse|mouse2|movie_camera|moyai|muscle|mushroom|musical_keyboard|musical_note|musical_score|mute|nail_care|name_badge|neckbeard|necktie|negative_squared_cross_mark|neutral_face|new|new_moon|new_moon_with_face|newspaper|ng|nine|no_bell|no_bicycles|no_entry|no_entry_sign|no_good|no_mobile_phones|no_mouth|no_pedestrians|no_smoking|non\\-potable_water|nose|notebook|notebook_with_decorative_cover|notes|nut_and_bolt|o|o2|ocean|octocat|octopus|oden|office|ok|ok_hand|ok_woman|older_man|older_woman|on|oncoming_automobile|oncoming_bus|oncoming_police_car|oncoming_taxi|one|open_file_folder|open_hands|open_mouth|ophiuchus|orange_book|outbox_tray|ox|package|page_facing_up|page_with_curl|pager|palm_tree|panda_face|paperclip|parking|part_alternation_mark|partly_sunny|passport_control|paw_prints|peach|pear|pencil|pencil2|penguin|pensive|performing_arts|persevere|person_frowning|person_with_blond_hair|person_with_pouting_face|phone|pig|pig2|pig_nose|pill|pineapple|pisces|pizza|plus1|point_down|point_left|point_right|point_up|point_up_2|police_car|poodle|poop|post_office|postal_horn|postbox|potable_water|pouch|poultry_leg|pound|pouting_cat|pray|princess|punch|purple_heart|purse|pushpin|put_litter_in_its_place|question|rabbit|rabbit2|racehorse|radio|radio_button|rage|rage1|rage2|rage3|rage4|railway_car|rainbow|raised_hand|raised_hands|raising_hand|ram|ramen|rat|recycle|red_car|red_circle|registered|relaxed|relieved|repeat|repeat_one|restroom|revolving_hearts|rewind|ribbon|rice|rice_ball|rice_cracker|rice_scene|ring|rocket|roller_coaster|rooster|rose|rotating_light|round_pushpin|rowboat|ru|rugby_football|runner|running|running_shirt_with_sash|sa|sagittarius|sailboat|sake|sandal|santa|satellite|satisfied|saxophone|school|school_satchel|scissors|scorpius|scream|scream_cat|scroll|seat|secret|see_no_evil|seedling|seven|shaved_ice|sheep|shell|ship|shipit|shirt|shit|shoe|shower|signal_strength|six|six_pointed_star|ski|skull|sleeping|sleepy|slot_machine|small_blue_diamond|small_orange_diamond|small_red_triangle|small_
red_triangle_down|smile|smile_cat|smiley|smiley_cat|smiling_imp|smirk|smirk_cat|smoking|snail|snake|snowboarder|snowflake|snowman|sob|soccer|soon|sos|sound|space_invader|spades|spaghetti|sparkle|sparkler|sparkles|sparkling_heart|speak_no_evil|speaker|speech_balloon|speedboat|squirrel|star|star2|stars|station|statue_of_liberty|steam_locomotive|stew|straight_ruler|strawberry|stuck_out_tongue|stuck_out_tongue_closed_eyes|stuck_out_tongue_winking_eye|sun_with_face|sunflower|sunglasses|sunny|sunrise|sunrise_over_mountains|surfer|sushi|suspect|suspension_railway|sweat|sweat_drops|sweat_smile|sweet_potato|swimmer|symbols|syringe|tada|tanabata_tree|tangerine|taurus|taxi|tea|telephone|telephone_receiver|telescope|tennis|tent|thought_balloon|three|thumbsdown|thumbsup|ticket|tiger|tiger2|tired_face|tm|toilet|tokyo_tower|tomato|tongue|top|tophat|tractor|traffic_light|train|train2|tram|triangular_flag_on_post|triangular_ruler|trident|triumph|trolleybus|trollface|trophy|tropical_drink|tropical_fish|truck|trumpet|tshirt|tulip|turtle|tv|twisted_rightwards_arrows|two|two_hearts|two_men_holding_hands|two_women_holding_hands|u5272|u5408|u55b6|u6307|u6708|u6709|u6e80|u7121|u7533|u7981|u7a7a|uk|umbrella|unamused|underage|unlock|up|us|v|vertical_traffic_light|vhs|vibration_mode|video_camera|video_game|violin|virgo|volcano|vs|walking|waning_crescent_moon|waning_gibbous_moon|warning|watch|water_buffalo|watermelon|wave|wavy_dash|waxing_crescent_moon|waxing_gibbous_moon|wc|weary|wedding|whale|whale2|wheelchair|white_check_mark|white_circle|white_flower|white_large_square|white_medium_small_square|white_medium_square|white_small_square|white_square_button|wind_chime|wine_glass|wink|wolf|woman|womans_clothes|womans_hat|womens|worried|wrench|x|yellow_heart|yen|yum|zap|zero|zzz)(:)", + "name": "string.emoji.gfm", + "captures": { + "1": { + "name": "string.emoji.start.gfm" + }, + "2": { + "name": "string.emoji.word.gfm" + }, + "3": { + "name": "string.emoji.end.gfm" + } + } + }, + { + "match": "(?<=^|\\s|\"|'|\\(|\\[)(#)(\\d+)(?=[\\s\"'\\.,;\\)\\]])", + "captures": { + "1": { + "name": "variable.issue.tag.gfm" + }, + "2": { + "name": "string.issue.number.gfm" + } + } + }, + { + "match": "(?<=^|\\s|\"|'|\\(|\\[)(@)(\\w[-\\w:]*)(?=[\\s\"'.,;\\)\\]])", + "captures": { + "1": { + "name": "variable.mention.gfm" + }, + "2": { + "name": "string.username.gfm" + } + } + }, + { + "begin": "(?<=^|[^\\w\\d~])~~(?!$|~|\\s)", + "end": "(?<!^|\\s)~~*~(?=$|[^\\w|\\d])", + "name": "markup.strike.gfm", + "patterns": [ + { + "match": "(&)([a-zA-Z0-9]+|#[0-9]+|#x[0-9a-fA-F]+)(;)", + "name": "constant.character.entity.gfm", + "captures": { + "1": { + "name": "punctuation.definition.entity.gfm" + }, + "3": { + "name": "punctuation.definition.entity.gfm" + } + } + } + ] + } + ] + }, + "escapes": { + "patterns": [ + { + "match": "\\\\.", + "name": "constant.character.escape.gfm" + } + ] + }, + "code": { + "patterns": [ + { + "begin": "(`+)(?!$)", + "end": "\\1", + "name": "markup.raw.gfm" + } + ] + }, + "links": { + "patterns": [ + { + "match": "(\\[!)(\\[)([^\\]]*)(\\])(\\()([^\\)]+)(\\))(\\])((\\()([^\\)]+)(\\))|(\\[)([^\\]]+)(\\]))", + "name": "link", + "captures": { + "1": { + "name": "punctuation.definition.begin.gfm" + }, + "2": { + "name": "punctuation.definition.begin.gfm" + }, + "3": { + "name": "entity.gfm" + }, + "4": { + "name": "punctuation.definition.end.gfm" + }, + "5": { + "name": "punctuation.definition.begin.gfm" + }, + "6": { + "name": "markup.underline.link.gfm" + }, + "7": { + "name": "punctuation.definition.end.gfm" + }, 
+ "8": { + "name": "punctuation.definition.end.gfm" + }, + "10": { + "name": "punctuation.definition.begin.gfm" + }, + "11": { + "name": "markup.underline.link.gfm" + }, + "12": { + "name": "punctuation.definition.end.gfm" + }, + "13": { + "name": "punctuation.definition.begin.gfm" + }, + "14": { + "name": "markup.underline.link.gfm" + }, + "15": { + "name": "punctuation.definition.end.gfm" + } + } + }, + { + "match": "(\\[!)(\\[)([^\\]]*)(\\])(\\[)([^\\)]+)(\\])(\\])((\\()([^\\)]+)(\\))|(\\[)([^\\]]+)(\\]))", + "name": "link", + "captures": { + "1": { + "name": "punctuation.definition.begin.gfm" + }, + "2": { + "name": "punctuation.definition.begin.gfm" + }, + "3": { + "name": "entity.gfm" + }, + "4": { + "name": "punctuation.definition.end.gfm" + }, + "5": { + "name": "punctuation.definition.begin.gfm" + }, + "6": { + "name": "markup.underline.link.gfm" + }, + "7": { + "name": "punctuation.definition.end.gfm" + }, + "8": { + "name": "punctuation.definition.end.gfm" + }, + "10": { + "name": "punctuation.definition.begin.gfm" + }, + "11": { + "name": "markup.underline.link.gfm" + }, + "12": { + "name": "punctuation.definition.end.gfm" + }, + "13": { + "name": "punctuation.definition.begin.gfm" + }, + "14": { + "name": "markup.underline.link.gfm" + }, + "15": { + "name": "punctuation.definition.end.gfm" + } + } + }, + { + "match": "!?(\\[)([^\\]]*)(\\])(\\()([^\\)]+)(\\))", + "name": "link", + "captures": { + "1": { + "name": "punctuation.definition.begin.gfm" + }, + "2": { + "name": "entity.gfm" + }, + "3": { + "name": "punctuation.definition.end.gfm" + }, + "4": { + "name": "punctuation.definition.begin.gfm" + }, + "5": { + "name": "markup.underline.link.gfm" + }, + "6": { + "name": "punctuation.definition.end.gfm" + } + } + }, + { + "match": "!?(\\[)([^\\]]*)(\\])(\\[)([^\\]]*)(\\])", + "name": "link", + "captures": { + "1": { + "name": "punctuation.definition.begin.gfm" + }, + "2": { + "name": "entity.gfm" + }, + "3": { + "name": "punctuation.definition.end.gfm" + }, + "4": { + "name": "punctuation.definition.begin.gfm" + }, + "5": { + "name": "markup.underline.link.gfm" + }, + "6": { + "name": "punctuation.definition.end.gfm" + } + } + }, + { + "match": "^\\s*(\\[)([^\\]]+)(\\])\\s*:\\s*<([^>]+)>", + "name": "link", + "captures": { + "1": { + "name": "punctuation.definition.begin.gfm" + }, + "2": { + "name": "entity.gfm" + }, + "3": { + "name": "punctuation.definition.end.gfm" + }, + "4": { + "name": "markup.underline.link.gfm" + } + } + }, + { + "match": "^\\s*(\\[)([^\\]]+)(\\])\\s*(:)\\s*(\\S+)", + "name": "link", + "captures": { + "1": { + "name": "punctuation.definition.begin.gfm" + }, + "2": { + "name": "entity.gfm" + }, + "3": { + "name": "punctuation.definition.end.gfm" + }, + "4": { + "name": "punctuation.separator.key-value.gfm" + }, + "5": { + "name": "markup.underline.link.gfm" + } + } + } + ] + }, + "emphasis": { + "patterns": [ + { + "begin": "(?<=^|[^\\w\\d\\*])\\*\\*\\*(?!$|\\*|\\s)", + "end": "(?<!^|\\s)\\*\\*\\**\\*(?=$|[^\\w|\\d])", + "name": "markup.bold.italic.gfm", + "patterns": [ + { + "match": "(&)([a-zA-Z0-9]+|#[0-9]+|#x[0-9a-fA-F]+)(;)", + "name": "constant.character.entity.gfm", + "captures": { + "1": { + "name": "punctuation.definition.entity.gfm" + }, + "2": { + "name": "punctuation.definition.entity.gfm" + } + } + } + ] + }, + { + "begin": "(?<=^|[^\\w\\d_])___(?!$|_|\\s)", + "end": "(?<!^|\\s)___*_(?=$|[^\\w|\\d])", + "name": "markup.bold.italic.gfm", + "patterns": [ + { + "match": "(&)([a-zA-Z0-9]+|#[0-9]+|#x[0-9a-fA-F]+)(;)", + "name": 
"constant.character.entity.gfm", + "captures": { + "1": { + "name": "punctuation.definition.entity.gfm" + }, + "2": { + "name": "punctuation.definition.entity.gfm" + } + } + } + ] + }, + { + "match": "(?<![\\w|\\\\])([_]{2})(?!\\s)(?m:(.+?))(?<![\\s|\\\\])(\\1)(?!\\w)", + "name": "markup.bold.gfm", + "captures": { + "1": { + "name": "punctuation.definition.entity.gfm" + }, + "2": { + "patterns": [ + { + "include": "#inlines-in-inlines" + } + ] + }, + "3": { + "name": "punctuation.definition.entity.gfm" + } + } + }, + { + "match": "(?<![\\w|\\\\])([\\*]{2})(?!\\s)(?m:(.+?))(?<![\\s|\\\\])(\\1)(?!\\w)", + "name": "markup.bold.gfm", + "captures": { + "1": { + "name": "punctuation.definition.entity.gfm" + }, + "2": { + "patterns": [ + { + "include": "#inlines-in-inlines" + } + ] + }, + "3": { + "name": "punctuation.definition.entity.gfm" + } + } + }, + { + "match": "(?<=\\w)([\\*]{2})(?:.+?)(?<!\\W)(\\1)", + "name": "markup.bold.gfm", + "captures": { + "1": { + "name": "punctuation.definition.entity.gfm" + }, + "2": { + "name": "punctuation.definition.entity.gfm" + } + } + }, + { + "match": "(?<=\\s|^)([\\*]{2})(?=\\w)(?:.+?)(\\1)(?=\\w)", + "name": "markup.bold.gfm", + "captures": { + "1": { + "name": "punctuation.definition.entity.gfm" + }, + "2": { + "name": "punctuation.definition.entity.gfm" + } + } + }, + { + "match": "(?<![\\w|_|\\\\])([_])(?!\\s|\\1)(?m:(.+?))(?<![\\s|\\\\])(\\1)(?!\\w)", + "name": "markup.italic.gfm", + "captures": { + "1": { + "name": "punctuation.definition.entity.gfm" + }, + "2": { + "patterns": [ + { + "include": "#inlines-in-inlines" + } + ] + }, + "3": { + "name": "punctuation.definition.entity.gfm" + } + } + }, + { + "match": "(?<![\\w|\\*|\\\\])([\\*])(?!\\s|\\1)(?m:(.+?))(?<![\\s|\\\\])(\\1)(?!\\w)", + "name": "markup.italic.gfm", + "captures": { + "1": { + "name": "punctuation.definition.entity.gfm" + }, + "2": { + "patterns": [ + { + "include": "#inlines-in-inlines" + } + ] + }, + "3": { + "name": "punctuation.definition.entity.gfm" + } + } + }, + { + "match": "(?<=\\w)([\\*])(?:.+?)(?<!\\W)(\\1)", + "name": "markup.italic.gfm", + "captures": { + "1": { + "name": "punctuation.definition.entity.gfm" + }, + "2": { + "name": "punctuation.definition.entity.gfm" + } + } + }, + { + "match": "(?<=\\s|^)([\\*])(?=\\w)(?:.+?)(\\1)(?=\\w)", + "name": "markup.italic.gfm", + "captures": { + "1": { + "name": "punctuation.definition.entity.gfm" + }, + "2": { + "name": "punctuation.definition.entity.gfm" + } + } + } + ] + }, + "line-breaks": { + "patterns": [ + { + "match": "( )$", + "captures": { + "1": { + "name": "linebreak.gfm" + } + } + } + ] + }, + "entities": { + "patterns": [ + { + "match": "(&)[a-zA-Z0-9]+(;)", + "name": "constant.character.entity.gfm", + "captures": { + "1": { + "name": "punctuation.definition.entity.gfm" + }, + "2": { + "name": "punctuation.definition.entity.gfm" + } + } + }, + { + "match": "(&)#[0-9]+(;)", + "name": "constant.character.entity.gfm", + "captures": { + "1": { + "name": "punctuation.definition.entity.gfm" + }, + "2": { + "name": "punctuation.definition.entity.gfm" + } + } + }, + { + "match": "(&)#x[0-9a-fA-F]+(;)", + "name": "constant.character.entity.gfm", + "captures": { + "1": { + "name": "punctuation.definition.entity.gfm" + }, + "2": { + "name": "punctuation.definition.entity.gfm" + } + } + } + ] + }, + "criticmark": { + "patterns": [ + { + "begin": "{\\+\\+", + "end": "\\+\\+}", + "name": "markup.inserted.critic.gfm.addition", + "captures": { + "0": { + "name": "punctuation.definition.inserted.critic.gfm.addition.marker" + } 
+ }, + "patterns": [ + { + "include": "#emphasis" + } + ] + }, + { + "begin": "{--", + "end": "--}", + "name": "markup.deleted.critic.gfm.deletion", + "captures": { + "0": { + "name": "punctuation.definition.deleted.critic.gfm.deletion.marker" + } + }, + "patterns": [ + { + "include": "#emphasis" + } + ] + }, + { + "begin": "{==", + "end": "==}", + "name": "critic.gfm.highlight", + "captures": { + "0": { + "name": "critic.gfm.highlight.marker" + } + }, + "patterns": [ + { + "include": "#emphasis" + } + ] + }, + { + "begin": "{>>", + "end": "<<}", + "name": "critic.gfm.comment", + "captures": { + "0": { + "name": "critic.gfm.comment.marker" + } + } + }, + { + "begin": "{~~", + "end": "~~}", + "name": "markup.changed.critic.gfm.substitution", + "captures": { + "0": { + "name": "punctuation.definition.changed.critic.gfm.substitution.marker" + } + }, + "patterns": [ + { + "match": "~>", + "name": "punctuation.definition.changed.critic.gfm.substitution.operator" + }, + { + "include": "#emphasis" + } + ] + } + ] + } + } + } diff --git a/packages/language-gfm/package.json b/packages/language-gfm/package.json new file mode 100644 index 000000000..8d6a66c17 --- /dev/null +++ b/packages/language-gfm/package.json @@ -0,0 +1,14 @@ +{ + "name": "language-gfm", + "version": "0.90.8", + "description": "Syntax highlighting and snippets for GitHub Flavored Markdown (GFM).", + "repository": "https://github.com/atom/language-gfm", + "license": "MIT", + "engines": { + "atom": "*" + }, + "devDependencies": { + "coffee-script": "1.7.0", + "coffeelint": "^1.10.1" + } +} diff --git a/packages/language-gfm/settings/gfm.cson b/packages/language-gfm/settings/gfm.cson new file mode 100644 index 000000000..928b6c8c0 --- /dev/null +++ b/packages/language-gfm/settings/gfm.cson @@ -0,0 +1,5 @@ +'.source.gfm:not(.markup.code)': + 'editor': + 'softWrap': true + 'commentStart': '<!-- ' + 'commentEnd': ' -->' diff --git a/packages/language-gfm/snippets/gfm.cson b/packages/language-gfm/snippets/gfm.cson new file mode 100644 index 000000000..48b3ea533 --- /dev/null +++ b/packages/language-gfm/snippets/gfm.cson @@ -0,0 +1,42 @@ +'.source.gfm': + 'bold text': + 'prefix': 'b' + 'body': '**$1**$0' + 'code': + 'prefix': 'code' + 'body': """ + ```$1 + $2 + ```$0 + """ + 'italic text': + 'prefix': 'i' + 'body': '*$1*$0' + 'embedded image': + 'prefix': 'img' + 'body': '![$1]($2)$0' + 'link': + 'prefix': 'l' + 'body': '[$1]($2)$0' + 'bullet point': + 'prefix': 'p' + 'body': '- $1' + 'reference': + 'prefix': 'ref' + 'body': '[${1:id}]: ${2:url}${3: "${4:title}"}$0' + 'reference image': + 'prefix': 'rimg' + 'body': '![$1][$2]$0' + 'reference link': + 'prefix': 'rl' + 'body': '[$1][$2]$0' + 'todo': + 'prefix': 't' + 'body': '- [ ] $1' + 'table': + 'prefix': 'table' + 'body': """ + | ${1:Header One } | ${2:Header Two } | + | :------------- | :------------- | + | ${3:Item One } | ${4:Item Two } |$0 + """ diff --git a/packages/language-gfm/spec/gfm-spec.coffee b/packages/language-gfm/spec/gfm-spec.coffee new file mode 100644 index 000000000..9ba3e60b9 --- /dev/null +++ b/packages/language-gfm/spec/gfm-spec.coffee @@ -0,0 +1,897 @@ +describe "GitHub Flavored Markdown grammar", -> + grammar = null + + beforeEach -> + waitsForPromise -> + atom.packages.activatePackage("language-gfm") + + runs -> + grammar = atom.grammars.grammarForScopeName("source.gfm") + + it "parses the grammar", -> + expect(grammar).toBeDefined() + expect(grammar.scopeName).toBe "source.gfm" + + it "tokenizes spaces", -> + {tokens} = grammar.tokenizeLine(" ") + 
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"] + + it "tokenizes horizontal rules", -> + {tokens} = grammar.tokenizeLine("***") + expect(tokens[0]).toEqual value: "***", scopes: ["source.gfm", "comment.hr.gfm"] + + {tokens} = grammar.tokenizeLine("---") + expect(tokens[0]).toEqual value: "---", scopes: ["source.gfm", "comment.hr.gfm"] + + {tokens} = grammar.tokenizeLine("___") + expect(tokens[0]).toEqual value: "___", scopes: ["source.gfm", "comment.hr.gfm"] + + it "tokenizes escaped characters", -> + {tokens} = grammar.tokenizeLine("\\*") + expect(tokens[0]).toEqual value: "\\*", scopes: ["source.gfm", "constant.character.escape.gfm"] + + {tokens} = grammar.tokenizeLine("\\\\") + expect(tokens[0]).toEqual value: "\\\\", scopes: ["source.gfm", "constant.character.escape.gfm"] + + {tokens} = grammar.tokenizeLine("\\abc") + expect(tokens[0]).toEqual value: "\\a", scopes: ["source.gfm", "constant.character.escape.gfm"] + expect(tokens[1]).toEqual value: "bc", scopes: ["source.gfm"] + + it "tokenizes ***bold italic*** text", -> + {tokens} = grammar.tokenizeLine("this is ***bold italic*** text") + expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(tokens[2]).toEqual value: "bold italic", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(tokens[3]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"] + + [firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ***bold\nitalic***!") + expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"] + expect(firstLineTokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(firstLineTokens[2]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(secondLineTokens[0]).toEqual value: "italic", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(secondLineTokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"] + + it "tokenizes ___bold italic___ text", -> + {tokens} = grammar.tokenizeLine("this is ___bold italic___ text") + expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(tokens[2]).toEqual value: "bold italic", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(tokens[3]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"] + + [firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ___bold\nitalic___!") + expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"] + expect(firstLineTokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(firstLineTokens[2]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(secondLineTokens[0]).toEqual value: "italic", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(secondLineTokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"] + + it "tokenizes **bold** text", -> + {tokens} = grammar.tokenizeLine("**bold**") + expect(tokens[0]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm", 
"punctuation.definition.entity.gfm"] + expect(tokens[1]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.gfm"] + expect(tokens[2]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"] + + [firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is **not\nbold**!") + expect(firstLineTokens[0]).toEqual value: "this is **not", scopes: ["source.gfm"] + expect(secondLineTokens[0]).toEqual value: "bold**!", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("not**bold**") + expect(tokens[0]).toEqual value: "not", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[2]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.gfm"] + expect(tokens[3]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"] + + it "tokenizes __bold__ text", -> + {tokens} = grammar.tokenizeLine("____") + expect(tokens[0]).toEqual value: "____", scopes: ["source.gfm", "comment.hr.gfm"] + + {tokens} = grammar.tokenizeLine("__bold__") + expect(tokens[0]).toEqual value: "__", scopes: [ 'source.gfm', 'markup.bold.gfm', 'punctuation.definition.entity.gfm' ] + expect(tokens[1]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.gfm"] + expect(tokens[2]).toEqual value: "__", scopes: [ 'source.gfm', 'markup.bold.gfm', 'punctuation.definition.entity.gfm' ] + + [firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is __not\nbold__!") + expect(firstLineTokens[0]).toEqual value: "this is __not", scopes: ["source.gfm"] + expect(secondLineTokens[0]).toEqual value: "bold__!", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("not__bold__") + expect(tokens[0]).toEqual value: "not__bold__", scopes: ["source.gfm"] + + it "tokenizes *italic* text", -> + {tokens} = grammar.tokenizeLine("**") + expect(tokens[0]).toEqual value: "**", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("this is *italic* text") + expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "*", scopes: [ "source.gfm", "markup.italic.gfm", "punctuation.definition.entity.gfm" ] + expect(tokens[2]).toEqual value: "italic", scopes: ["source.gfm", "markup.italic.gfm"] + expect(tokens[3]).toEqual value: "*", scopes: [ "source.gfm", "markup.italic.gfm", "punctuation.definition.entity.gfm" ] + expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("is*italic*") + expect(tokens[0]).toEqual value: "is", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "*", scopes: [ "source.gfm", "markup.italic.gfm", "punctuation.definition.entity.gfm" ] + expect(tokens[2]).toEqual value: "italic", scopes: ["source.gfm", "markup.italic.gfm"] + expect(tokens[3]).toEqual value: "*", scopes: [ "source.gfm", "markup.italic.gfm", "punctuation.definition.entity.gfm" ] + + {tokens} = grammar.tokenizeLine("* not italic") + expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm"] + expect(tokens[2]).toEqual value: "not italic", scopes: ["source.gfm"] + + [firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is *not\nitalic*!") + expect(firstLineTokens[0]).toEqual value: "this is *not", scopes: ["source.gfm"] + expect(secondLineTokens[0]).toEqual value: "italic*!", scopes: ["source.gfm"] + + it "tokenizes _italic_ text", -> + 
{tokens} = grammar.tokenizeLine("__") + expect(tokens[0]).toEqual value: "__", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("this is _italic_ text") + expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "_", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ] + expect(tokens[2]).toEqual value: "italic", scopes: ["source.gfm", "markup.italic.gfm"] + expect(tokens[3]).toEqual value: "_", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ] + expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("not_italic_") + expect(tokens[0]).toEqual value: "not_italic_", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("not x^{a}_m y^{b}_n italic") + expect(tokens[0]).toEqual value: "not x^{a}_m y^{b}_n italic", scopes: ["source.gfm"] + + [firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is _not\nitalic_!") + expect(firstLineTokens[0]).toEqual value: "this is _not", scopes: ["source.gfm"] + expect(secondLineTokens[0]).toEqual value: "italic_!", scopes: ["source.gfm"] + + it "tokenizes ~~strike~~ text", -> + {tokens} = grammar.tokenizeLine("~~strike~~") + expect(tokens[0]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"] + expect(tokens[1]).toEqual value: "strike", scopes: ["source.gfm", "markup.strike.gfm"] + expect(tokens[2]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"] + + [firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ~~str\nike~~!") + expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"] + expect(firstLineTokens[1]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"] + expect(firstLineTokens[2]).toEqual value: "str", scopes: ["source.gfm", "markup.strike.gfm"] + expect(secondLineTokens[0]).toEqual value: "ike", scopes: ["source.gfm", "markup.strike.gfm"] + expect(secondLineTokens[1]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"] + expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("not~~strike~~") + expect(tokens[0]).toEqual value: "not~~strike~~", scopes: ["source.gfm"] + + it "tokenizes headings", -> + {tokens} = grammar.tokenizeLine("# Heading 1") + expect(tokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"] + expect(tokens[2]).toEqual value: "Heading 1", scopes: ["source.gfm", "markup.heading.heading-1.gfm"] + + {tokens} = grammar.tokenizeLine("## Heading 2") + expect(tokens[0]).toEqual value: "##", scopes: ["source.gfm", "markup.heading.heading-2.gfm", "markup.heading.marker.gfm"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-2.gfm", "markup.heading.space.gfm"] + expect(tokens[2]).toEqual value: "Heading 2", scopes: ["source.gfm", "markup.heading.heading-2.gfm"] + + {tokens} = grammar.tokenizeLine("### Heading 3") + expect(tokens[0]).toEqual value: "###", scopes: ["source.gfm", "markup.heading.heading-3.gfm", "markup.heading.marker.gfm"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-3.gfm", "markup.heading.space.gfm"] + expect(tokens[2]).toEqual value: "Heading 3", scopes: ["source.gfm", "markup.heading.heading-3.gfm"] + + {tokens} = grammar.tokenizeLine("#### Heading 4") 
+ expect(tokens[0]).toEqual value: "####", scopes: ["source.gfm", "markup.heading.heading-4.gfm", "markup.heading.marker.gfm"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-4.gfm", "markup.heading.space.gfm"] + expect(tokens[2]).toEqual value: "Heading 4", scopes: ["source.gfm", "markup.heading.heading-4.gfm"] + + {tokens} = grammar.tokenizeLine("##### Heading 5") + expect(tokens[0]).toEqual value: "#####", scopes: ["source.gfm", "markup.heading.heading-5.gfm", "markup.heading.marker.gfm"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-5.gfm", "markup.heading.space.gfm"] + expect(tokens[2]).toEqual value: "Heading 5", scopes: ["source.gfm", "markup.heading.heading-5.gfm"] + + {tokens} = grammar.tokenizeLine("###### Heading 6") + expect(tokens[0]).toEqual value: "######", scopes: ["source.gfm", "markup.heading.heading-6.gfm", "markup.heading.marker.gfm"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-6.gfm", "markup.heading.space.gfm"] + expect(tokens[2]).toEqual value: "Heading 6", scopes: ["source.gfm", "markup.heading.heading-6.gfm"] + + it "tokenizes matches inside of headers", -> + {tokens} = grammar.tokenizeLine("# Heading :one:") + expect(tokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"] + expect(tokens[2]).toEqual value: "Heading ", scopes: ["source.gfm", "markup.heading.heading-1.gfm"] + expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.start.gfm"] + expect(tokens[4]).toEqual value: "one", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.word.gfm"] + expect(tokens[5]).toEqual value: ":", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.end.gfm"] + + it "tokenizes an :emoji:", -> + {tokens} = grammar.tokenizeLine("this is :no_good:") + expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: ":", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.start.gfm"] + expect(tokens[2]).toEqual value: "no_good", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.word.gfm"] + expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.end.gfm"] + + {tokens} = grammar.tokenizeLine("this is :no good:") + expect(tokens[0]).toEqual value: "this is :no good:", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("http://localhost:8080") + expect(tokens[0]).toEqual value: "http://localhost:8080", scopes: ["source.gfm"] + + it "tokenizes a ``` code block", -> + {tokens, ruleStack} = grammar.tokenizeLine("```") + expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"] + {tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack) + expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"] + {tokens} = grammar.tokenizeLine("```", ruleStack) + expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"] + + it "tokenizes a ~~~ code block", -> + {tokens, ruleStack} = grammar.tokenizeLine("~~~") + expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"] + {tokens, ruleStack} = grammar.tokenizeLine("-> 
'hello'", ruleStack) + expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"] + {tokens} = grammar.tokenizeLine("~~~", ruleStack) + expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"] + + it "doesn't tokenise ~`~ as a code block", -> + {tokens} = grammar.tokenizeLine("~`~") + expect(tokens[0]).toEqual value: '~', scopes: ['source.gfm'] + expect(tokens[1]).toEqual value: '`', scopes: ['source.gfm', 'markup.raw.gfm'] + expect(tokens[2]).toEqual value: '~', scopes: ['source.gfm', 'markup.raw.gfm'] + + it "tokenises code-blocks with borders of differing lengths", -> + [firstLineTokens, secondLineTokens, thirdLineTokens] = grammar.tokenizeLines("~~~\nfoo bar\n~~~~~~~") + expect(firstLineTokens[0]).toEqual value: '~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm'] + expect(secondLineTokens[0]).toEqual value: 'foo bar', scopes: ['source.gfm', 'markup.raw.gfm'] + expect(thirdLineTokens[0]).toEqual value: '~~~~~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm'] + + [firstLineTokens, secondLineTokens, thirdLineTokens] = grammar.tokenizeLines("~~~~~~~\nfoo bar\n~~~") + expect(firstLineTokens[0]).toEqual value: '~~~~~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm'] + expect(secondLineTokens[0]).toEqual value: 'foo bar', scopes: ['source.gfm', 'markup.raw.gfm'] + expect(thirdLineTokens[0]).toEqual value: '~~~', scopes: ['source.gfm', 'markup.raw.gfm'] + + it "tokenizes a ``` code block with trailing whitespace", -> + {tokens, ruleStack} = grammar.tokenizeLine("```") + expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"] + {tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack) + expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"] + {tokens} = grammar.tokenizeLine("``` ", ruleStack) + expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"] + + it "tokenizes a ~~~ code block with trailing whitespace", -> + {tokens, ruleStack} = grammar.tokenizeLine("~~~") + expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"] + {tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack) + expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"] + {tokens} = grammar.tokenizeLine("~~~ ", ruleStack) + expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"] + + it "tokenises a ``` code block with an unknown language", -> + {tokens, ruleStack} = grammar.tokenizeLine("``` myLanguage") + expect(tokens[0]).toEqual value: '``` myLanguage', scopes: ['source.gfm', 'markup.code.other.gfm', 'support.gfm'] + + {tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack) + expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ['source.gfm', 'markup.code.other.gfm', 'source.embedded.mylanguage'] + + {tokens} = grammar.tokenizeLine("```", ruleStack) + expect(tokens[0]).toEqual value: '```', scopes: ['source.gfm', 'markup.code.other.gfm', 'support.gfm'] + + it "tokenizes a ``` code block with a known language", -> + {tokens, ruleStack} = grammar.tokenizeLine("``` bash") + expect(tokens[0]).toEqual value: "``` bash", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell" + + {tokens, ruleStack} = grammar.tokenizeLine("```js ") + expect(tokens[0]).toEqual value: "```js ", scopes: ["source.gfm", 
"markup.code.js.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.js" + + {tokens, ruleStack} = grammar.tokenizeLine("```JS ") + expect(tokens[0]).toEqual value: "```JS ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.js" + + {tokens, ruleStack} = grammar.tokenizeLine("```r ") + expect(tokens[0]).toEqual value: "```r ", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.r" + + {tokens, ruleStack} = grammar.tokenizeLine("```properties ") + expect(tokens[0]).toEqual value: "```properties ", scopes: ["source.gfm", "markup.code.git-config.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.git-config" + + it "tokenizes a Rmarkdown ``` code block", -> + {tokens, ruleStack} = grammar.tokenizeLine("```{r}") + expect(tokens[0]).toEqual value: "```{r}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.r" + + {tokens, ruleStack} = grammar.tokenizeLine("```{r,eval=TRUE,cache=FALSE}") + expect(tokens[0]).toEqual value: "```{r,eval=TRUE,cache=FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.r" + + {tokens, ruleStack} = grammar.tokenizeLine("```{r eval=TRUE,cache=FALSE}") + expect(tokens[0]).toEqual value: "```{r eval=TRUE,cache=FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.r" + + it "tokenizes a Rmarkdown ``` code block with whitespace", -> + {tokens, ruleStack} = grammar.tokenizeLine("```{r }") + expect(tokens[0]).toEqual value: "```{r }", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.r" + + {tokens, ruleStack} = grammar.tokenizeLine("```{R } ") + expect(tokens[0]).toEqual value: "```{R } ", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.r" + + {tokens, ruleStack} = grammar.tokenizeLine("```{r eval = TRUE, cache = FALSE}") + expect(tokens[0]).toEqual value: "```{r eval = TRUE, cache = FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.r" + + it "tokenizes a ~~~ code block with a language", -> + {tokens, ruleStack} = grammar.tokenizeLine("~~~ bash") + expect(tokens[0]).toEqual value: "~~~ bash", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell" + + {tokens, ruleStack} = grammar.tokenizeLine("~~~js ") + expect(tokens[0]).toEqual value: "~~~js ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.js" + + {tokens, ruleStack} = grammar.tokenizeLine("~~~properties ") + expect(tokens[0]).toEqual value: "~~~properties ", scopes: ["source.gfm", "markup.code.git-config.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.git-config" + + it "tokenizes a ``` code block with a language and trailing whitespace", -> + {tokens, ruleStack} = grammar.tokenizeLine("``` bash") + {tokens} = grammar.tokenizeLine("``` ", ruleStack) + expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe 
"source.embedded.shell" + + {tokens, ruleStack} = grammar.tokenizeLine("```js ") + {tokens} = grammar.tokenizeLine("``` ", ruleStack) + expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.js" + + it "tokenizes a ~~~ code block with a language and trailing whitespace", -> + {tokens, ruleStack} = grammar.tokenizeLine("~~~ bash") + {tokens} = grammar.tokenizeLine("~~~ ", ruleStack) + expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell" + + {tokens, ruleStack} = grammar.tokenizeLine("~~~js ") + {tokens} = grammar.tokenizeLine("~~~ ", ruleStack) + expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.js" + + {tokens, ruleStack} = grammar.tokenizeLine("~~~ properties ") + {tokens} = grammar.tokenizeLine("~~~ ", ruleStack) + expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.code.git-config.gfm", "support.gfm"] + expect(ruleStack[1].contentScopeName).toBe "source.embedded.git-config" + + it "tokenizes inline `code` blocks", -> + {tokens} = grammar.tokenizeLine("`this` is `code`") + expect(tokens[0]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"] + expect(tokens[1]).toEqual value: "this", scopes: ["source.gfm", "markup.raw.gfm"] + expect(tokens[2]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"] + expect(tokens[3]).toEqual value: " is ", scopes: ["source.gfm"] + expect(tokens[4]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"] + expect(tokens[5]).toEqual value: "code", scopes: ["source.gfm", "markup.raw.gfm"] + expect(tokens[6]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"] + + {tokens} = grammar.tokenizeLine("``") + expect(tokens[0]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"] + expect(tokens[1]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"] + + {tokens} = grammar.tokenizeLine("``a\\`b``") + expect(tokens[0]).toEqual value: "``", scopes: ["source.gfm", "markup.raw.gfm"] + expect(tokens[1]).toEqual value: "a\\`b", scopes: ["source.gfm", "markup.raw.gfm"] + expect(tokens[2]).toEqual value: "``", scopes: ["source.gfm", "markup.raw.gfm"] + + it "tokenizes [links](links)", -> + {tokens} = grammar.tokenizeLine("please click [this link](website)") + expect(tokens[0]).toEqual value: "please click ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[2]).toEqual value: "this link", scopes: ["source.gfm", "link", "entity.gfm"] + expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + expect(tokens[4]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[5]).toEqual value: "website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"] + expect(tokens[6]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + + it "tokenizes reference [links][links]", -> + {tokens} = grammar.tokenizeLine("please click [this link][website]") + expect(tokens[0]).toEqual value: "please click ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[2]).toEqual value: 
"this link", scopes: ["source.gfm", "link", "entity.gfm"] + expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[5]).toEqual value: "website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"] + expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + + it "tokenizes id-less reference [links][]", -> + {tokens} = grammar.tokenizeLine("please click [this link][]") + expect(tokens[0]).toEqual value: "please click ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[2]).toEqual value: "this link", scopes: ["source.gfm", "link", "entity.gfm"] + expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[5]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + + it "tokenizes [link]: footers", -> + {tokens} = grammar.tokenizeLine("[aLink]: http://website") + expect(tokens[0]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[1]).toEqual value: "aLink", scopes: ["source.gfm", "link", "entity.gfm"] + expect(tokens[2]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "link", "punctuation.separator.key-value.gfm"] + expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "link"] + expect(tokens[5]).toEqual value: "http://website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"] + + it "tokenizes [link]: <footers>", -> + {tokens} = grammar.tokenizeLine("[aLink]: <http://website>") + expect(tokens[0]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[1]).toEqual value: "aLink", scopes: ["source.gfm", "link", "entity.gfm"] + expect(tokens[2]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + expect(tokens[3]).toEqual value: ": <", scopes: ["source.gfm", "link"] + expect(tokens[4]).toEqual value: "http://website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"] + expect(tokens[5]).toEqual value: ">", scopes: ["source.gfm", "link"] + + it "tokenizes [![links](links)](links)", -> + {tokens} = grammar.tokenizeLine("[![title](image)](link)") + expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"] + expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + expect(tokens[4]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"] + expect(tokens[6]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + expect(tokens[8]).toEqual value: "(", scopes: ["source.gfm", "link", 
"punctuation.definition.begin.gfm"] + expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"] + expect(tokens[10]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + + it "tokenizes [![links](links)][links]", -> + {tokens} = grammar.tokenizeLine("[![title](image)][link]") + expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"] + expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + expect(tokens[4]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"] + expect(tokens[6]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + expect(tokens[8]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"] + expect(tokens[10]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + + it "tokenizes [![links][links]](links)", -> + {tokens} = grammar.tokenizeLine("[![title][image]](link)") + expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"] + expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"] + expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + expect(tokens[8]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"] + expect(tokens[10]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + + it "tokenizes [![links][links]][links]", -> + {tokens} = grammar.tokenizeLine("[![title][image]][link]") + expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"] + expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"] + expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", 
"punctuation.definition.end.gfm"] + expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + expect(tokens[8]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"] + expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"] + expect(tokens[10]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"] + + it "tokenizes mentions", -> + {tokens} = grammar.tokenizeLine("sentence with no space before@name ") + expect(tokens[0]).toEqual value: "sentence with no space before@name ", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("@name '@name' @name's @name. @name, (@name) [@name]") + expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"] + expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"] + expect(tokens[2]).toEqual value: " '", scopes: ["source.gfm"] + expect(tokens[3]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"] + expect(tokens[4]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"] + expect(tokens[5]).toEqual value: "' ", scopes: ["source.gfm"] + expect(tokens[6]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"] + expect(tokens[7]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"] + expect(tokens[8]).toEqual value: "'s ", scopes: ["source.gfm"] + expect(tokens[9]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"] + expect(tokens[10]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"] + expect(tokens[11]).toEqual value: ". ", scopes: ["source.gfm"] + expect(tokens[12]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"] + expect(tokens[13]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"] + expect(tokens[14]).toEqual value: ", (", scopes: ["source.gfm"] + expect(tokens[15]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"] + expect(tokens[16]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"] + expect(tokens[17]).toEqual value: ") [", scopes: ["source.gfm"] + expect(tokens[18]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"] + expect(tokens[19]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"] + expect(tokens[20]).toEqual value: "]", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine('"@name"') + expect(tokens[0]).toEqual value: '"', scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"] + expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"] + expect(tokens[3]).toEqual value: '"', scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("sentence with a space before @name/ and an invalid symbol after") + expect(tokens[0]).toEqual value: "sentence with a space before @name/ and an invalid symbol after", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("sentence with a space before @name that continues") + expect(tokens[0]).toEqual value: "sentence with a space before ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"] + expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"] + expect(tokens[3]).toEqual value: " that continues", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("* @name at the start of an unordered list") + 
expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm"] + expect(tokens[2]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"] + expect(tokens[3]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"] + expect(tokens[4]).toEqual value: " at the start of an unordered list", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("a username @1337_hubot with numbers, letters and underscores") + expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"] + expect(tokens[2]).toEqual value: "1337_hubot", scopes: ["source.gfm", "string.username.gfm"] + expect(tokens[3]).toEqual value: " with numbers, letters and underscores", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("a username @1337-hubot with numbers, letters and hyphens") + expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"] + expect(tokens[2]).toEqual value: "1337-hubot", scopes: ["source.gfm", "string.username.gfm"] + expect(tokens[3]).toEqual value: " with numbers, letters and hyphens", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("@name at the start of a line") + expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"] + expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"] + expect(tokens[2]).toEqual value: " at the start of a line", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("any email like you@domain.com shouldn't mistakenly be matched as a mention") + expect(tokens[0]).toEqual value: "any email like you@domain.com shouldn't mistakenly be matched as a mention", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("@person's") + expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"] + expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"] + expect(tokens[2]).toEqual value: "'s", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("@person;") + expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"] + expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"] + expect(tokens[2]).toEqual value: ";", scopes: ["source.gfm"] + + it "tokenizes issue numbers", -> + {tokens} = grammar.tokenizeLine("sentence with no space before#12 ") + expect(tokens[0]).toEqual value: "sentence with no space before#12 ", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine(" #101 '#101' #101's #101. 
#101, (#101) [#101]") + expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"] + expect(tokens[2]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"] + expect(tokens[3]).toEqual value: " '", scopes: ["source.gfm"] + expect(tokens[4]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"] + expect(tokens[5]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"] + expect(tokens[6]).toEqual value: "' ", scopes: ["source.gfm"] + expect(tokens[7]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"] + expect(tokens[8]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"] + expect(tokens[9]).toEqual value: "'s ", scopes: ["source.gfm"] + expect(tokens[10]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"] + expect(tokens[11]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"] + expect(tokens[12]).toEqual value: ". ", scopes: ["source.gfm"] + expect(tokens[13]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"] + expect(tokens[14]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"] + expect(tokens[15]).toEqual value: ", (", scopes: ["source.gfm"] + expect(tokens[16]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"] + expect(tokens[17]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"] + expect(tokens[18]).toEqual value: ") [", scopes: ["source.gfm"] + expect(tokens[19]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"] + expect(tokens[20]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"] + expect(tokens[21]).toEqual value: "]", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine('"#101"') + expect(tokens[0]).toEqual value: '"', scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"] + expect(tokens[2]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"] + expect(tokens[3]).toEqual value: '"', scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("sentence with a space before #123i and a character after") + expect(tokens[0]).toEqual value: "sentence with a space before #123i and a character after", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine("sentence with a space before #123 that continues") + expect(tokens[0]).toEqual value: "sentence with a space before ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"] + expect(tokens[2]).toEqual value: "123", scopes: ["source.gfm", "string.issue.number.gfm"] + expect(tokens[3]).toEqual value: " that continues", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine(" #123's") + expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"] + expect(tokens[2]).toEqual value: "123", scopes: ["source.gfm", "string.issue.number.gfm"] + expect(tokens[3]).toEqual value: "'s", scopes: ["source.gfm"] + + it "tokenizes unordered lists", -> + {tokens} = grammar.tokenizeLine("*Item 1") + expect(tokens[0]).not.toEqual value: "*Item 1", scopes: ["source.gfm", "variable.unordered.list.gfm"] + + {tokens} = grammar.tokenizeLine(" * Item 1") + expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"] + expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"] + expect(tokens[3]).toEqual value: "Item 1", 
scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine(" + Item 2") + expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "+", scopes: ["source.gfm", "variable.unordered.list.gfm"] + expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"] + expect(tokens[3]).toEqual value: "Item 2", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine(" - Item 3") + expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "-", scopes: ["source.gfm", "variable.unordered.list.gfm"] + expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"] + expect(tokens[3]).toEqual value: "Item 3", scopes: ["source.gfm"] + + it "tokenizes ordered lists", -> + {tokens} = grammar.tokenizeLine("1.First Item") + expect(tokens[0]).toEqual value: "1.First Item", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine(" 1. First Item") + expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "1.", scopes: ["source.gfm", "variable.ordered.list.gfm"] + expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"] + expect(tokens[3]).toEqual value: "First Item", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine(" 10. Tenth Item") + expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "10.", scopes: ["source.gfm", "variable.ordered.list.gfm"] + expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"] + expect(tokens[3]).toEqual value: "Tenth Item", scopes: ["source.gfm"] + + {tokens} = grammar.tokenizeLine(" 111. Hundred and eleventh item") + expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: "111.", scopes: ["source.gfm", "variable.ordered.list.gfm"] + expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"] + expect(tokens[3]).toEqual value: "Hundred and eleventh item", scopes: ["source.gfm"] + + it "tokenizes > quoted text", -> + {tokens} = grammar.tokenizeLine("> Quotation :+1:") + expect(tokens[0]).toEqual value: ">", scopes: ["source.gfm", "comment.quote.gfm", "support.quote.gfm"] + expect(tokens[1]).toEqual value: " Quotation :+1:", scopes: ["source.gfm", "comment.quote.gfm"] + + it "tokenizes HTML entities", -> + {tokens} = grammar.tokenizeLine("™ ™ &a1; ³") + expect(tokens[0]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[1]).toEqual value: "trade", scopes: ["source.gfm", "constant.character.entity.gfm"] + expect(tokens[2]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + + expect(tokens[3]).toEqual value: " ", scopes: ["source.gfm"] + + expect(tokens[4]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[5]).toEqual value: "#8482", scopes: ["source.gfm", "constant.character.entity.gfm"] + expect(tokens[6]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + + expect(tokens[7]).toEqual value: " ", scopes: ["source.gfm"] + + expect(tokens[8]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[9]).toEqual value: "a1", scopes: ["source.gfm", "constant.character.entity.gfm"] + expect(tokens[10]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + + 
expect(tokens[11]).toEqual value: " ", scopes: ["source.gfm"] + + expect(tokens[12]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[13]).toEqual value: "#xb3", scopes: ["source.gfm", "constant.character.entity.gfm"] + expect(tokens[14]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + + it "tokenizes HTML entities in *italic* text", -> + {tokens} = grammar.tokenizeLine("*&trade; &#8482; &#xb3;*") + expect(tokens[0]).toEqual value: "*", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ] + expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"] + expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"] + expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"] + expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"] + expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"] + expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[12]).toEqual value: "*", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ] + + {tokens} = grammar.tokenizeLine("_&trade; &#8482; &#xb3;_") + expect(tokens[0]).toEqual value: "_", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ] + expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"] + expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"] + expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"] + expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"] + expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", 
"punctuation.definition.entity.gfm"] + expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"] + expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[12]).toEqual value: "_", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ] + + it "tokenizes HTML entities in **bold** text", -> + {tokens} = grammar.tokenizeLine("**™ ™ ³**") + expect(tokens[0]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"] + expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"] + expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"] + expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"] + expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"] + expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[12]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"] + + {tokens} = grammar.tokenizeLine("__™ ™ ³__") + expect(tokens[0]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"] + expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"] + expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"] + expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"] + expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + 
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"] + expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[12]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"] + + it "tokenizes HTML entities in ***bold italic*** text", -> + {tokens} = grammar.tokenizeLine("***&trade; &#8482; &#xb3;***") + expect(tokens[0]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[2]).toEqual value: "trade", scopes: [ "source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm" ] + expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"] + expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[6]).toEqual value: "#8482", scopes: [ "source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm" ] + expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"] + expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"] + expect(tokens[12]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"] + + {tokens} = grammar.tokenizeLine("___&trade; &#8482; &#xb3;___") + expect(tokens[0]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"] + expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"] + expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"] + expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", 
"punctuation.definition.entity.gfm"] + expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"] + expect(tokens[12]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"] + + it "tokenizes HTML entities in strikethrough text", -> + {tokens} = grammar.tokenizeLine("~~™ ™ ³~~") + expect(tokens[0]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"] + expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm"] + expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.strike.gfm"] + expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm"] + expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.strike.gfm"] + expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm"] + expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"] + expect(tokens[12]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"] + + it "tokenizes HTML comments", -> + {tokens} = grammar.tokenizeLine("<!-- a comment -->") + expect(tokens[0]).toEqual value: "<!--", scopes: ["source.gfm", "comment.block.gfm", "punctuation.definition.comment.gfm"] + expect(tokens[1]).toEqual value: " a comment ", scopes: ["source.gfm", "comment.block.gfm"] + expect(tokens[2]).toEqual value: "-->", scopes: ["source.gfm", "comment.block.gfm", "punctuation.definition.comment.gfm"] + + it "tokenizes YAML front matter", -> + [firstLineTokens, secondLineTokens, thirdLineTokens] = grammar.tokenizeLines """ + --- + front: matter + --- + """ + + expect(firstLineTokens[0]).toEqual value: "---", scopes: ["source.gfm", "front-matter.yaml.gfm", "comment.hr.gfm"] + expect(secondLineTokens[0]).toEqual value: "front: matter", scopes: ["source.gfm", "front-matter.yaml.gfm"] + expect(thirdLineTokens[0]).toEqual value: "---", scopes: ["source.gfm", "front-matter.yaml.gfm", "comment.hr.gfm"] + + it "tokenizes linebreaks", -> + {tokens} = grammar.tokenizeLine("line ") + expect(tokens[0]).toEqual value: "line", scopes: ["source.gfm"] + expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "linebreak.gfm"] + + it "tokenizes tables", -> + [headerTokens, alignTokens, contentTokens] = grammar.tokenizeLines """ + | Column 1 | Column 2 | + |:----------|:---------:| + | Content 1 | Content 2 | + """ + + # Header line + expect(headerTokens[0]).toEqual 
value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"] + expect(headerTokens[1]).toEqual value: " Column 1 ", scopes: ["source.gfm", "table.gfm"] + expect(headerTokens[2]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"] + expect(headerTokens[3]).toEqual value: " Column 2 ", scopes: ["source.gfm", "table.gfm"] + expect(headerTokens[4]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"] + + # Alignment line + expect(alignTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"] + expect(alignTokens[1]).toEqual value: ":", scopes: ["source.gfm", "table.gfm", "border.alignment"] + expect(alignTokens[2]).toEqual value: "----------", scopes: ["source.gfm", "table.gfm", "border.header"] + expect(alignTokens[3]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"] + expect(alignTokens[4]).toEqual value: ":", scopes: ["source.gfm", "table.gfm", "border.alignment"] + expect(alignTokens[5]).toEqual value: "---------", scopes: ["source.gfm", "table.gfm", "border.header"] + expect(alignTokens[6]).toEqual value: ":", scopes: ["source.gfm", "table.gfm", "border.alignment"] + expect(alignTokens[7]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"] + + # Content line + expect(contentTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"] + expect(contentTokens[1]).toEqual value: " Content 1 ", scopes: ["source.gfm", "table.gfm"] + expect(contentTokens[2]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"] + expect(contentTokens[3]).toEqual value: " Content 2 ", scopes: ["source.gfm", "table.gfm"] + expect(contentTokens[4]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"] + + [headerTokens, emptyLineTokens, headingTokens] = grammar.tokenizeLines """ + | Column 1 | Column 2\t + + # Heading + """ + + expect(headerTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"] + expect(headerTokens[1]).toEqual value: " Column 1 ", scopes: ["source.gfm", "table.gfm"] + expect(headerTokens[2]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"] + expect(headerTokens[3]).toEqual value: " Column 2", scopes: ["source.gfm", "table.gfm"] + expect(headerTokens[4]).toEqual value: "\t", scopes: ["source.gfm", "table.gfm"] + + expect(headingTokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"] + expect(headingTokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"] + expect(headingTokens[2]).toEqual value: "Heading", scopes: ["source.gfm", "markup.heading.heading-1.gfm"] + + it "tokenizes criticmarkup", -> + [addToken, delToken, hlToken, subToken] = grammar.tokenizeLines """ + Add{++ some text++} + Delete{-- some text--} + Highlight {==some text==}{>>with comment<<} + Replace {~~this~>by that~~} + """ + # Addition + expect(addToken[0]).toEqual value: "Add", scopes: ["source.gfm"] + expect(addToken[1]).toEqual value: "{++", scopes: ["source.gfm", "markup.inserted.critic.gfm.addition", "punctuation.definition.inserted.critic.gfm.addition.marker"] + expect(addToken[2]).toEqual value: " some text", scopes: ["source.gfm", "markup.inserted.critic.gfm.addition"] + expect(addToken[3]).toEqual value: "++}", scopes: ["source.gfm", "markup.inserted.critic.gfm.addition", "punctuation.definition.inserted.critic.gfm.addition.marker"] + # 
Deletion + expect(delToken[0]).toEqual value: "Delete", scopes: ["source.gfm"] + expect(delToken[1]).toEqual value: "{--", scopes: ["source.gfm", "markup.deleted.critic.gfm.deletion", "punctuation.definition.deleted.critic.gfm.deletion.marker"] + expect(delToken[2]).toEqual value: " some text", scopes: ["source.gfm", "markup.deleted.critic.gfm.deletion"] + expect(delToken[3]).toEqual value: "--}", scopes: ["source.gfm", "markup.deleted.critic.gfm.deletion", "punctuation.definition.deleted.critic.gfm.deletion.marker"] + # Comment and highlight + expect(hlToken[0]).toEqual value: "Highlight ", scopes: ["source.gfm"] + expect(hlToken[1]).toEqual value: "{==", scopes: ["source.gfm", "critic.gfm.highlight", "critic.gfm.highlight.marker"] + expect(hlToken[2]).toEqual value: "some text", scopes: ["source.gfm", "critic.gfm.highlight"] + expect(hlToken[3]).toEqual value: "==}", scopes: ["source.gfm", "critic.gfm.highlight", "critic.gfm.highlight.marker"] + expect(hlToken[4]).toEqual value: "{>>", scopes: ["source.gfm", "critic.gfm.comment", "critic.gfm.comment.marker"] + expect(hlToken[5]).toEqual value: "with comment", scopes: ["source.gfm", "critic.gfm.comment"] + expect(hlToken[6]).toEqual value: "<<}", scopes: ["source.gfm", "critic.gfm.comment", "critic.gfm.comment.marker"] + # Replace + expect(subToken[0]).toEqual value: "Replace ", scopes: ["source.gfm"] + expect(subToken[1]).toEqual value: "{~~", scopes: ["source.gfm", "markup.changed.critic.gfm.substitution", "punctuation.definition.changed.critic.gfm.substitution.marker"] + expect(subToken[2]).toEqual value: "this", scopes: ["source.gfm", "markup.changed.critic.gfm.substitution"] + expect(subToken[3]).toEqual value: "~>", scopes: ["source.gfm", "markup.changed.critic.gfm.substitution", "punctuation.definition.changed.critic.gfm.substitution.operator"] + expect(subToken[4]).toEqual value: "by that", scopes: ["source.gfm", "markup.changed.critic.gfm.substitution"] + expect(subToken[5]).toEqual value: "~~}", scopes: ["source.gfm", "markup.changed.critic.gfm.substitution", "punctuation.definition.changed.critic.gfm.substitution.marker"] diff --git a/packages/language-git/.coffeelintignore b/packages/language-git/.coffeelintignore new file mode 100644 index 000000000..1db51fed7 --- /dev/null +++ b/packages/language-git/.coffeelintignore @@ -0,0 +1 @@ +spec/fixtures diff --git a/packages/language-git/.github/no-response.yml b/packages/language-git/.github/no-response.yml new file mode 100644 index 000000000..1c8799d13 --- /dev/null +++ b/packages/language-git/.github/no-response.yml @@ -0,0 +1,15 @@ +# Configuration for probot-no-response - https://github.com/probot/no-response + +# Number of days of inactivity before an issue is closed for lack of response +daysUntilClose: 28 + +# Label requiring a response +responseRequiredLabel: more-information-needed + +# Comment to post when closing an issue for lack of response. Set to `false` to disable. +closeComment: > + This issue has been automatically closed because there has been no response + to our request for more information from the original author. With only the + information that is currently in the issue, we don't have enough information + to take action. Please reach out if you have or find the answers we need so + that we can investigate further. 
diff --git a/packages/language-git/.github/workflows/ci.yml b/packages/language-git/.github/workflows/ci.yml new file mode 100644 index 000000000..ab77c1f1f --- /dev/null +++ b/packages/language-git/.github/workflows/ci.yml @@ -0,0 +1,23 @@ +name: CI + +on: [push] + +env: + CI: true + +jobs: + Test: + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + channel: [stable, beta] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v1 + - uses: UziTech/action-setup-atom@v2 + with: + version: ${{ matrix.channel }} + - name: Install dependencies + run: apm install + - name: Run tests + run: atom --test spec diff --git a/packages/language-git/.gitignore b/packages/language-git/.gitignore new file mode 100644 index 000000000..148f149e5 --- /dev/null +++ b/packages/language-git/.gitignore @@ -0,0 +1,4 @@ +/scratchpad.rb +/Support/DEBUG +/Support/log +node_modules diff --git a/packages/language-git/CONTRIBUTING.md b/packages/language-git/CONTRIBUTING.md new file mode 100644 index 000000000..0fd0ad696 --- /dev/null +++ b/packages/language-git/CONTRIBUTING.md @@ -0,0 +1 @@ +See the [Atom contributing guide](https://github.com/atom/atom/blob/master/CONTRIBUTING.md) diff --git a/packages/language-git/ISSUE_TEMPLATE.md b/packages/language-git/ISSUE_TEMPLATE.md new file mode 100644 index 000000000..b60bb86c9 --- /dev/null +++ b/packages/language-git/ISSUE_TEMPLATE.md @@ -0,0 +1,40 @@ +<!-- + +Have you read Atom's Code of Conduct? By filing an Issue, you are expected to comply with it, including treating everyone with respect: https://github.com/atom/atom/blob/master/CODE_OF_CONDUCT.md + +Do you want to ask a question? Are you looking for support? The Atom message board is the best place for getting support: https://discuss.atom.io + +--> + +### Prerequisites + +* [ ] Put an X between the brackets on this line if you have done all of the following: + * Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode + * Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/ + * Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq + * Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom + * Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages + +### Description + +[Description of the issue] + +### Steps to Reproduce + +1. [First Step] +2. [Second Step] +3. [and so on...] + +**Expected behavior:** [What you expect to happen] + +**Actual behavior:** [What actually happens] + +**Reproduces how often:** [What percentage of the time does it reproduce?] + +### Versions + +You can get this information from copy and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running. + +### Additional Information + +Any additional information, configuration or data that might be necessary to reproduce the issue. diff --git a/packages/language-git/LICENSE.md b/packages/language-git/LICENSE.md new file mode 100644 index 000000000..98c7a866f --- /dev/null +++ b/packages/language-git/LICENSE.md @@ -0,0 +1,47 @@ +Copyright (c) 2014 GitHub Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +-------------------------------------------------------------------- + +This package was derived from a TextMate bundle located at +https://github.com/textmate/git.tmbundle and distributed under the following +license, located in `MIT-LICENSE`: + +Copyright (c) 2008 Tim Harper + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/packages/language-git/PULL_REQUEST_TEMPLATE.md b/packages/language-git/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000..cdaa94a86 --- /dev/null +++ b/packages/language-git/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,28 @@ +### Requirements + +* Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion. +* All new code requires tests to ensure against regressions + +### Description of the Change + +<!-- + +We must be able to understand the design of your change from this description. If we can't get a good idea of what the code will be doing from the description here, the pull request may be closed at the maintainers' discretion. Keep in mind that the maintainer reviewing this PR may not be familiar with or have worked with the code here recently, so please walk us through the concepts. + +--> + +### Alternate Designs + +<!-- Explain what other alternates were considered and why the proposed version was selected --> + +### Benefits + +<!-- What benefits will be realized by the code change? 
--> + +### Possible Drawbacks + +<!-- What are the possible side-effects or negative impacts of the code change? --> + +### Applicable Issues + +<!-- Enter any applicable Issues here --> diff --git a/packages/language-git/README.md b/packages/language-git/README.md new file mode 100644 index 000000000..38a8d8663 --- /dev/null +++ b/packages/language-git/README.md @@ -0,0 +1,32 @@ +# Git editing support in Atom +[![macOS Build Status](https://travis-ci.org/atom/language-git.svg?branch=master)](https://travis-ci.org/atom/language-git) +[![Windows Build Status](https://ci.appveyor.com/api/projects/status/481319gyrr1feo8b/branch/master?svg=true)](https://ci.appveyor.com/project/Atom/language-git/branch/master) +[![Dependency Status](https://david-dm.org/atom/language-git.svg)](https://david-dm.org/atom/language-git) + +Adds syntax highlighting to Git commit, merge, and rebase messages edited in Atom. + +You can configure Atom to be your Git editor with the following command: + +```sh +git config --global core.editor "atom --wait" +``` + +## Commit message highlighting + +This package uses warning and error highlighting to help bring attention to some violations of [standard conventions around commit message best practices](http://chris.beams.io/posts/git-commit/#seven-rules): + +1. If the subject line goes beyond 50 characters and again if it goes beyond 72 characters +1. If the subject line begins with a lower-case letter (emoji at the beginning of the subject line won't be highlighted) +1. If the subject line ends with a period +1. If any non-comment body line goes beyond 72 characters + +## Diff highlighting + +If [language-diff](https://atom.io/packages/language-diff) is installed, the +diff part of `git commit --verbose` messages is highlighted as well. + +## Background + +Originally [converted](http://flight-manual.atom.io/hacking-atom/sections/converting-from-textmate) from the [Git TextMate bundle](https://github.com/textmate/git.tmbundle). + +Contributions are greatly appreciated. Please fork this repository and open a pull request to add snippets, make grammar tweaks, etc. 
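The length and formatting rules above are implemented by the patterns in `grammars/git commit message.cson` and exercised by `spec/git-spec.coffee`, both added later in this diff. For orientation, a minimal sketch in the style of that spec, assuming an Atom spec environment, showing how the 50-character soft limit surfaces as a scope:

```coffee
describe "Git commit message subject length", ->
  grammar = null

  beforeEach ->
    waitsForPromise ->
      atom.packages.activatePackage("language-git")

    runs ->
      grammar = atom.grammars.grammarForScopeName("text.git-commit")

  it "marks the 51st character of the subject as over the soft limit", ->
    # 51-character subject line: the first 50 characters tokenize normally,
    # the 51st picks up invalid.deprecated.line-too-long.git-commit.
    {tokens} = grammar.tokenizeLine("123456789012345678901234567890123456789012345678901", null, true)
    expect(tokens[0].value.length).toBe 50
    expect(tokens[1].scopes).toContain "invalid.deprecated.line-too-long.git-commit"
```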
diff --git a/packages/language-git/coffeelint.json b/packages/language-git/coffeelint.json new file mode 100644 index 000000000..a5dd715e3 --- /dev/null +++ b/packages/language-git/coffeelint.json @@ -0,0 +1,37 @@ +{ + "max_line_length": { + "level": "ignore" + }, + "no_empty_param_list": { + "level": "error" + }, + "arrow_spacing": { + "level": "error" + }, + "no_interpolation_in_single_quotes": { + "level": "error" + }, + "no_debugger": { + "level": "error" + }, + "prefer_english_operator": { + "level": "error" + }, + "colon_assignment_spacing": { + "spacing": { + "left": 0, + "right": 1 + }, + "level": "error" + }, + "braces_spacing": { + "spaces": 0, + "level": "error" + }, + "spacing_after_comma": { + "level": "error" + }, + "no_stand_alone_at": { + "level": "error" + } +} diff --git a/packages/language-git/grammars/git commit message.cson b/packages/language-git/grammars/git commit message.cson new file mode 100644 index 000000000..4d6b16efe --- /dev/null +++ b/packages/language-git/grammars/git commit message.cson @@ -0,0 +1,141 @@ +'name': 'Git Commit Message' +'scopeName': 'text.git-commit' +'fileTypes': [ + 'COMMIT_EDITMSG' + 'MERGE_MSG' +] +'foldingStartMarker': '^\\+\\+\\+' +'foldingStopMarker': '^---' +'patterns': [ + { + 'begin': '\\A(?!#)' + 'end': '^(?<!\\A)(?=# Please enter the commit message)' + 'name': 'meta.scope.message.git-commit' + 'patterns': [ + { + 'match': '\\G((fixup|squash)!)' + 'captures': + '1': + 'name': 'keyword.other.$2.git-commit' + } + { + 'match': '^#.*$' + 'name': 'comment.line.number-sign.git-commit' + } + { + # Subject line 0-50 chars + 'match': '\\A(?!#)(([a-z])|.).{0,48}((\\.)|.)$' + 'captures': + '2': + 'name': 'invalid.illegal.first-char-should-be-uppercase.git-commit' + '4': + 'name': 'invalid.illegal.subject-no-trailing-period.git-commit' + } + { + # Subject line 51-72 chars + 'match': '\\A(?!#)(([a-z])|.).{49}(.{0,21})((\\.)|(.))$' + 'captures': + '2': + 'name': 'invalid.illegal.first-char-should-be-uppercase.git-commit' + '3': + 'name': 'invalid.deprecated.line-too-long.git-commit' + '5': + 'name': 'invalid.illegal.subject-no-trailing-period.git-commit' + '6': + 'name': 'invalid.deprecated.line-too-long.git-commit' + } + { + # Subject line 73 chars or longer + 'match': '\\A(?!#)(([a-z])|.).{49}(.{0,22})(.*?)(\\.?)$' + 'captures': + '2': + 'name': 'invalid.illegal.first-char-should-be-uppercase.git-commit' + '3': + 'name': 'invalid.deprecated.line-too-long.git-commit' + '4': + 'name': 'invalid.illegal.line-too-long.git-commit' + '5': + 'name': 'invalid.illegal.subject-no-trailing-period.git-commit' + } + { + # Body line 73 chars or longer + 'match': '^(?!#).{72}(.+)' + 'captures': + '1': + 'name': 'invalid.illegal.line-too-long.git-commit' + } + ] + } + { + 'begin': '(?<=^)(?=# Please enter the commit message)' + 'end': '\\z' + 'name': 'meta.scope.metadata.git-commit' + 'patterns': [ + { + 'include': '#metadata' + } + ] + } +] +'repository': + 'metadata': + 'patterns': [ + { + 'begin': '(?=^# Changes to be committed:)' + 'end': '(?!\\G)((?=^# \\w)|(?!^#))' + 'patterns': [ + { + 'begin': '(^[ \\t]+)?(?=#)' + 'beginCaptures': + '1': + 'name': 'punctuation.whitespace.comment.leading.git-commit' + 'contentName': 'comment.line.number-sign.git-commit' + 'end': '(?!\\G)^' + 'patterns': [ + { + 'match': '\\G#' + 'name': 'punctuation.definition.comment.git-commit' + } + { + 'match': '((modified|renamed):.*)$\\n?' + 'name': 'markup.changed.git-commit' + } + { + 'match': '(new file:.*)$\\n?' 
+ 'name': 'markup.inserted.git-commit' + } + { + 'match': '(deleted:.*)$\\n?' + 'name': 'markup.deleted.git-commit' + } + ] + } + ] + } + { + 'begin': '^(?=#)' + 'beginCaptures': + '1': + 'name': 'punctuation.whitespace.comment.leading.git-commit' + 'contentName': 'comment.line.number-sign.git-commit' + 'end': '(?!\\G)^' + 'patterns': [ + { + 'match': '\\G#' + 'name': 'punctuation.definition.comment.git-commit' + } + ] + } + { + 'begin': '(?=diff\\ \\-\\-git)' + 'comment': 'diff presented at the end of the commit message when using commit -v.' + 'contentName': 'source.diff' + 'end': '\\z' + 'name': 'meta.embedded.diff.git-commit' + 'patterns': [ + { + 'include': 'source.diff' + } + ] + } + ] diff --git a/packages/language-git/grammars/git config.cson b/packages/language-git/grammars/git config.cson new file mode 100644 index 000000000..afb74c5d8 --- /dev/null +++ b/packages/language-git/grammars/git config.cson @@ -0,0 +1,99 @@ +'name': 'Git Config' +'scopeName': 'source.git-config' +'fileTypes': [ + '.git/config' + 'gitconfig' + 'gitmodules' +] +'patterns': [ + { + 'include': '#section' + } + { + 'include': '#comment' + } +] +'repository': + 'boolean': + 'match': '\\b(?i:yes|no|0|1|true|false)\\b' + 'name': 'constant.language.boolean.git-config' + 'comment': + 'captures': + '1': + 'name': 'comment.line.number-sign.git-config' + '2': + 'name': 'punctuation.definition.comment.git-config' + '3': + 'name': 'comment.line.semi-colon.git-config' + '4': + 'name': 'punctuation.definition.comment.git-config' + 'match': '((#).*$\\n?)|((;).*$\\n?)' + 'section': + 'begin': '\\[\\s*([\\w_-]+)(?:\\s+((")(?:[^"\\\\]|\\\\["\\\\])*("))|\\.([\\w_-]+))?\\s*\\]' + 'captures': + '1': + 'name': 'entity.name.section.git-config' + '2': + 'name': 'entity.name.section.subsection.git-config' + '3': + 'name': 'punctuation.definition.section.subsection.begin.git-config' + '4': + 'name': 'punctuation.definition.section.subsection.end.git-config' + '5': + 'name': 'entity.name.section.subsection.git-config' + 'end': '(?=\\[)' + 'name': 'meta.section.git-config' + 'patterns': [ + { + 'include': '#value_pair' + } + { + 'include': '#comment' + } + ] + 'string': + 'begin': '"' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.git-config' + 'end': '"' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.git-config' + 'name': 'string.quoted.double.git-config' + 'patterns': [ + { + 'match': '\\\\[ntb"\\\\]' + 'name': 'constant.character.escape.git-config' + } + { + 'match': '\\\\.' 
+ 'name': 'invalid.illegal.unknown-escape.git-config' + } + ] + 'escaped-string': + 'match': '\\\\"' + 'name': 'constant.character.escape.git-config' + 'value_pair': + 'begin': '([-\\w]+)\\s*(=)\\s*(?!$)' + 'captures': + '1': + 'name': 'support.constant.git-config' + '2': + 'name': 'punctuation.separator.key-value.git-config' + 'end': '$|(?=[#;])' + 'name': 'meta.value-pair.section-item.git-config' + 'patterns': [ + { + 'include': '#boolean' + } + { + 'include': '#escaped-string' + } + { + 'include': '#string' + } + { + 'include': '#comment' + } + ] diff --git a/packages/language-git/grammars/git rebase message.cson b/packages/language-git/grammars/git rebase message.cson new file mode 100644 index 000000000..34a3c97cc --- /dev/null +++ b/packages/language-git/grammars/git rebase message.cson @@ -0,0 +1,38 @@ +'name': 'Git Rebase Message' +'scopeName': 'text.git-rebase' +'fileTypes': [ + 'git-rebase-todo' +] +'patterns': [ + { + 'captures': + '1': + 'name': 'punctuation.definition.comment.git-rebase' + 'match': '^\\s*(#).*$\\n?' + 'name': 'comment.line.number-sign.git-rebase' + } + { + 'captures': + '1': + 'name': 'support.function.git-rebase' + '2': + 'name': 'constant.sha.git-rebase' + '3': + 'name': 'meta.commit-message.git-rebase' + 'match': '^\\s*(pick|p|reword|r|edit|e|squash|s|fixup|f|drop|d)\\s+([0-9a-f]+)\\s+(.*)$' + 'name': 'meta.commit-command.git-rebase' + } + { + 'begin': '^\\s*(exec|x)\\s+' + 'beginCaptures': + '1': + 'name': 'support.function.git-rebase' + 'end': '$' + 'name': 'meta.exec-command.git-rebase' + 'patterns': [ + { + 'include': 'source.shell' + } + ] + } +] diff --git a/packages/language-git/package.json b/packages/language-git/package.json new file mode 100644 index 000000000..813c1c74d --- /dev/null +++ b/packages/language-git/package.json @@ -0,0 +1,21 @@ +{ + "name": "language-git", + "version": "0.19.1", + "description": "Git editing support in Atom", + "engines": { + "atom": "*", + "node": "*" + }, + "homepage": "http://atom.github.io/language-git", + "repository": { + "type": "git", + "url": "https://github.com/atom/language-git.git" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/atom/language-git/issues" + }, + "devDependencies": { + "coffeelint": "^1.10.1" + } +} diff --git a/packages/language-git/settings/language-git.cson b/packages/language-git/settings/language-git.cson new file mode 100644 index 000000000..abf6c33c6 --- /dev/null +++ b/packages/language-git/settings/language-git.cson @@ -0,0 +1,7 @@ +'.source.git-config': + 'editor': + 'commentStart': '# ' +'.text.git-commit': + 'editor': + 'foldEndPattern': '^---' + 'preferredLineLength': 72 diff --git a/packages/language-git/snippets/language-git.cson b/packages/language-git/snippets/language-git.cson new file mode 100644 index 000000000..b076d6217 --- /dev/null +++ b/packages/language-git/snippets/language-git.cson @@ -0,0 +1,8 @@ +'.text.git-commit': + 'commit-message': + 'prefix': 'comm' + 'body': """ + ${1:Subject < 50 chars} + + ${2:Body in detail} + """ diff --git a/packages/language-git/spec/git-spec.coffee b/packages/language-git/spec/git-spec.coffee new file mode 100644 index 000000000..a239ec506 --- /dev/null +++ b/packages/language-git/spec/git-spec.coffee @@ -0,0 +1,196 @@ +describe "Git grammars", -> + grammar = null + + beforeEach -> + waitsForPromise -> + atom.packages.activatePackage("language-git") + + describe "Git configs", -> + beforeEach -> + grammar = atom.grammars.grammarForScopeName("source.git-config") + + it "parses the Git config grammar", -> + 
expect(grammar).toBeTruthy() + expect(grammar.scopeName).toBe "source.git-config" + + describe "Git commit messages", -> + scopeNormal = ['text.git-commit', 'meta.scope.message.git-commit'] + + scopeLeadingLowercase = + ['text.git-commit', 'meta.scope.message.git-commit', 'invalid.illegal.first-char-should-be-uppercase.git-commit'] + + scopeTrailingPeriod = + ['text.git-commit', 'meta.scope.message.git-commit', 'invalid.illegal.subject-no-trailing-period.git-commit'] + + scopeLineOver50 = ['text.git-commit', 'meta.scope.message.git-commit', 'invalid.deprecated.line-too-long.git-commit'] + + scopeLineOver72 = ['text.git-commit', 'meta.scope.message.git-commit', 'invalid.illegal.line-too-long.git-commit'] + + beforeEach -> + grammar = atom.grammars.grammarForScopeName("text.git-commit") + + it "parses the Git commit message grammar", -> + expect(grammar).toBeTruthy() + expect(grammar.scopeName).toBe "text.git-commit" + + it "highlights subject lines of less than 50 chars correctly", -> + {tokens} = grammar.tokenizeLine("123456789012345678901234567890", null, true) + expect(tokens[0]).toEqual value: '123456789012345678901234567890', scopes: scopeNormal + + {tokens} = grammar.tokenizeLine("a23456789012345678901234567890", null, true) + expect(tokens[0]).toEqual value: 'a', scopes: scopeLeadingLowercase + expect(tokens[1]).toEqual value: '23456789012345678901234567890', scopes: scopeNormal + + {tokens} = grammar.tokenizeLine("12345678901234567890123456789.", null, true) + expect(tokens[0]).toEqual value: '12345678901234567890123456789', scopes: scopeNormal + expect(tokens[1]).toEqual value: '.', scopes: scopeTrailingPeriod + + {tokens} = grammar.tokenizeLine("b2345678901234567890123456789.", null, true) + expect(tokens[0]).toEqual value: 'b', scopes: scopeLeadingLowercase + expect(tokens[1]).toEqual value: '2345678901234567890123456789', scopes: scopeNormal + expect(tokens[2]).toEqual value: '.', scopes: scopeTrailingPeriod + + it "highlights subject lines of 50 chars correctly", -> + {tokens} = grammar.tokenizeLine("12345678901234567890123456789012345678901234567890", null, true) + expect(tokens[0]).toEqual value: '12345678901234567890123456789012345678901234567890', scopes: scopeNormal + + {tokens} = grammar.tokenizeLine("c2345678901234567890123456789012345678901234567890", null, true) + expect(tokens[0]).toEqual value: 'c', scopes: scopeLeadingLowercase + expect(tokens[1]).toEqual value: '2345678901234567890123456789012345678901234567890', scopes: scopeNormal + + {tokens} = grammar.tokenizeLine("1234567890123456789012345678901234567890123456789.", null, true) + expect(tokens[0]).toEqual value: '1234567890123456789012345678901234567890123456789', scopes: scopeNormal + expect(tokens[1]).toEqual value: '.', scopes: scopeTrailingPeriod + + {tokens} = grammar.tokenizeLine("d234567890123456789012345678901234567890123456789.", null, true) + expect(tokens[0]).toEqual value: 'd', scopes: scopeLeadingLowercase + expect(tokens[1]).toEqual value: '234567890123456789012345678901234567890123456789', scopes: scopeNormal + expect(tokens[2]).toEqual value: '.', scopes: scopeTrailingPeriod + + it "highlights subject lines of 51 chars correctly", -> + {tokens} = grammar.tokenizeLine("123456789012345678901234567890123456789012345678901", null, true) + expect(tokens[0]).toEqual value: '12345678901234567890123456789012345678901234567890', scopes: scopeNormal + expect(tokens[1]).toEqual value: '1', scopes: scopeLineOver50 + + {tokens} = grammar.tokenizeLine("e23456789012345678901234567890123456789012345678901", 
null, true) + expect(tokens[0]).toEqual value: 'e', scopes: scopeLeadingLowercase + expect(tokens[1]).toEqual value: '2345678901234567890123456789012345678901234567890', scopes: scopeNormal + expect(tokens[2]).toEqual value: '1', scopes: scopeLineOver50 + + {tokens} = grammar.tokenizeLine("12345678901234567890123456789012345678901234567890.", null, true) + expect(tokens[0]).toEqual value: '12345678901234567890123456789012345678901234567890', scopes: scopeNormal + expect(tokens[1]).toEqual value: '.', scopes: scopeTrailingPeriod + + {tokens} = grammar.tokenizeLine("f2345678901234567890123456789012345678901234567890.", null, true) + expect(tokens[0]).toEqual value: 'f', scopes: scopeLeadingLowercase + expect(tokens[1]).toEqual value: '2345678901234567890123456789012345678901234567890', scopes: scopeNormal + expect(tokens[2]).toEqual value: '.', scopes: scopeTrailingPeriod + + it "highlights subject lines of 72 chars correctly", -> + {tokens} = grammar.tokenizeLine("123456789012345678901234567890123456789012345678901234567890123456789012", null, true) + expect(tokens[0]).toEqual value: '12345678901234567890123456789012345678901234567890', scopes: scopeNormal + expect(tokens[1]).toEqual value: '123456789012345678901', scopes: scopeLineOver50 + expect(tokens[2]).toEqual value: '2', scopes: scopeLineOver50 + + {tokens} = grammar.tokenizeLine("g23456789012345678901234567890123456789012345678901234567890123456789012", null, true) + expect(tokens[0]).toEqual value: 'g', scopes: scopeLeadingLowercase + expect(tokens[1]).toEqual value: '2345678901234567890123456789012345678901234567890', scopes: scopeNormal + expect(tokens[2]).toEqual value: '123456789012345678901', scopes: scopeLineOver50 + expect(tokens[3]).toEqual value: '2', scopes: scopeLineOver50 + + {tokens} = grammar.tokenizeLine("12345678901234567890123456789012345678901234567890123456789012345678901.", null, true) + expect(tokens[0]).toEqual value: '12345678901234567890123456789012345678901234567890', scopes: scopeNormal + expect(tokens[1]).toEqual value: '123456789012345678901', scopes: scopeLineOver50 + expect(tokens[2]).toEqual value: '.', scopes: scopeTrailingPeriod + + {tokens} = grammar.tokenizeLine("h2345678901234567890123456789012345678901234567890123456789012345678901.", null, true) + expect(tokens[0]).toEqual value: 'h', scopes: scopeLeadingLowercase + expect(tokens[1]).toEqual value: '2345678901234567890123456789012345678901234567890', scopes: scopeNormal + expect(tokens[2]).toEqual value: '123456789012345678901', scopes: scopeLineOver50 + expect(tokens[3]).toEqual value: '.', scopes: scopeTrailingPeriod + + it "highlights subject lines of 73 chars correctly", -> + {tokens} = grammar.tokenizeLine("1234567890123456789012345678901234567890123456789012345678901234567890123", null, true) + expect(tokens[0]).toEqual value: '12345678901234567890123456789012345678901234567890', scopes: scopeNormal + expect(tokens[1]).toEqual value: '1234567890123456789012', scopes: scopeLineOver50 + expect(tokens[2]).toEqual value: '3', scopes: scopeLineOver72 + + {tokens} = grammar.tokenizeLine("i234567890123456789012345678901234567890123456789012345678901234567890123", null, true) + expect(tokens[0]).toEqual value: 'i', scopes: scopeLeadingLowercase + expect(tokens[1]).toEqual value: '2345678901234567890123456789012345678901234567890', scopes: scopeNormal + expect(tokens[2]).toEqual value: '1234567890123456789012', scopes: scopeLineOver50 + expect(tokens[3]).toEqual value: '3', scopes: scopeLineOver72 + + {tokens} = 
grammar.tokenizeLine("123456789012345678901234567890123456789012345678901234567890123456789012.", null, true) + expect(tokens[0]).toEqual value: '12345678901234567890123456789012345678901234567890', scopes: scopeNormal + expect(tokens[1]).toEqual value: '1234567890123456789012', scopes: scopeLineOver50 + expect(tokens[2]).toEqual value: '.', scopes: scopeTrailingPeriod + + {tokens} = grammar.tokenizeLine("j23456789012345678901234567890123456789012345678901234567890123456789012.", null, true) + expect(tokens[0]).toEqual value: 'j', scopes: scopeLeadingLowercase + expect(tokens[1]).toEqual value: '2345678901234567890123456789012345678901234567890', scopes: scopeNormal + expect(tokens[2]).toEqual value: '1234567890123456789012', scopes: scopeLineOver50 + expect(tokens[3]).toEqual value: '.', scopes: scopeTrailingPeriod + + it "highlights subject lines of over 73 chars correctly", -> + {tokens} = grammar.tokenizeLine("123456789012345678901234567890123456789012345678901234567890123456789012345678", null, true) + expect(tokens[0]).toEqual value: '12345678901234567890123456789012345678901234567890', scopes: scopeNormal + expect(tokens[1]).toEqual value: '1234567890123456789012', scopes: scopeLineOver50 + expect(tokens[2]).toEqual value: '345678', scopes: scopeLineOver72 + + {tokens} = grammar.tokenizeLine("k23456789012345678901234567890123456789012345678901234567890123456789012345678", null, true) + expect(tokens[0]).toEqual value: 'k', scopes: scopeLeadingLowercase + expect(tokens[1]).toEqual value: '2345678901234567890123456789012345678901234567890', scopes: scopeNormal + expect(tokens[2]).toEqual value: '1234567890123456789012', scopes: scopeLineOver50 + expect(tokens[3]).toEqual value: '345678', scopes: scopeLineOver72 + + {tokens} = grammar.tokenizeLine("123456789012345678901234567890123456789012345678901234567890123456789012345678.", null, true) + expect(tokens[0]).toEqual value: '12345678901234567890123456789012345678901234567890', scopes: scopeNormal + expect(tokens[1]).toEqual value: '1234567890123456789012', scopes: scopeLineOver50 + expect(tokens[2]).toEqual value: '345678', scopes: scopeLineOver72 + expect(tokens[3]).toEqual value: '.', scopes: scopeTrailingPeriod + + {tokens} = grammar.tokenizeLine("m23456789012345678901234567890123456789012345678901234567890123456789012345678.", null, true) + expect(tokens[0]).toEqual value: 'm', scopes: scopeLeadingLowercase + expect(tokens[1]).toEqual value: '2345678901234567890123456789012345678901234567890', scopes: scopeNormal + expect(tokens[2]).toEqual value: '1234567890123456789012', scopes: scopeLineOver50 + expect(tokens[3]).toEqual value: '345678', scopes: scopeLineOver72 + expect(tokens[4]).toEqual value: '.', scopes: scopeTrailingPeriod + + describe "Git rebases", -> + beforeEach -> + grammar = atom.grammars.grammarForScopeName("text.git-rebase") + + it "parses the Git rebase message grammar", -> + expect(grammar).toBeTruthy() + expect(grammar.scopeName).toBe "text.git-rebase" + + for cmd in ["pick", "p", "reword", "r", "edit", "e", "squash", "s", "fixup", "f", "drop", "d"] + it "parses the #{cmd} command", -> + {tokens} = grammar.tokenizeLine "#{cmd} c0ffeee This is commit message" + + expect(tokens[0]).toEqual value: cmd, scopes: ["text.git-rebase", "meta.commit-command.git-rebase", "support.function.git-rebase"] + expect(tokens[1]).toEqual value: " ", scopes: ["text.git-rebase", "meta.commit-command.git-rebase"] + expect(tokens[2]).toEqual value: "c0ffeee", scopes: ["text.git-rebase", "meta.commit-command.git-rebase", 
"constant.sha.git-rebase"] + expect(tokens[3]).toEqual value: " ", scopes: ["text.git-rebase", "meta.commit-command.git-rebase"] + expect(tokens[4]).toEqual value: "This is commit message", scopes: ["text.git-rebase", "meta.commit-command.git-rebase", "meta.commit-message.git-rebase"] + + it "parses the exec command", -> + {tokens} = grammar.tokenizeLine "exec" + + expect(tokens[0]).toEqual value: "exec", scopes: ["text.git-rebase", "meta.exec-command.git-rebase", "support.function.git-rebase"] + + {tokens} = grammar.tokenizeLine "x" + + expect(tokens[0]).toEqual value: "x", scopes: ["text.git-rebase", "meta.exec-command.git-rebase", "support.function.git-rebase"] + + it "includes language-shellscript highlighting when using the exec command", -> + waitsForPromise -> + atom.packages.activatePackage("language-shellscript") + + runs -> + {tokens} = grammar.tokenizeLine "exec echo 'Hello World'" + + expect(tokens[0]).toEqual value: "exec", scopes: ["text.git-rebase", "meta.exec-command.git-rebase", "support.function.git-rebase"] + expect(tokens[1]).toEqual value: " ", scopes: ["text.git-rebase", "meta.exec-command.git-rebase"] + expect(tokens[2]).toEqual value: "echo", scopes: ["text.git-rebase", "meta.exec-command.git-rebase", "support.function.builtin.shell"] diff --git a/packages/language-go/.coffeelintignore b/packages/language-go/.coffeelintignore new file mode 100644 index 000000000..1db51fed7 --- /dev/null +++ b/packages/language-go/.coffeelintignore @@ -0,0 +1 @@ +spec/fixtures diff --git a/packages/language-go/.github/no-response.yml b/packages/language-go/.github/no-response.yml new file mode 100644 index 000000000..1c8799d13 --- /dev/null +++ b/packages/language-go/.github/no-response.yml @@ -0,0 +1,15 @@ +# Configuration for probot-no-response - https://github.com/probot/no-response + +# Number of days of inactivity before an issue is closed for lack of response +daysUntilClose: 28 + +# Label requiring a response +responseRequiredLabel: more-information-needed + +# Comment to post when closing an issue for lack of response. Set to `false` to disable. +closeComment: > + This issue has been automatically closed because there has been no response + to our request for more information from the original author. With only the + information that is currently in the issue, we don't have enough information + to take action. Please reach out if you have or find the answers we need so + that we can investigate further. 
diff --git a/packages/language-go/.github/workflows/main.yml b/packages/language-go/.github/workflows/main.yml new file mode 100644 index 000000000..9aaa5cabf --- /dev/null +++ b/packages/language-go/.github/workflows/main.yml @@ -0,0 +1,28 @@ +name: CI + +on: [push] + +env: + CI: true + +jobs: + Test: + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + channel: [stable, beta] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v1 + - uses: UziTech/action-setup-atom@v2 + with: + version: ${{ matrix.channel }} + - name: Install windows-build-tools + if: ${{ matrix.os == 'windows-latest' }} + run: | + npm i windows-build-tools@4.0.0 + npm config set msvs_version 2019 + - name: Install dependencies + run: apm install + - name: Run tests + run: atom --test spec diff --git a/packages/language-go/.gitignore b/packages/language-go/.gitignore new file mode 100644 index 000000000..3c3629e64 --- /dev/null +++ b/packages/language-go/.gitignore @@ -0,0 +1 @@ +node_modules diff --git a/packages/language-go/CONTRIBUTING.md b/packages/language-go/CONTRIBUTING.md new file mode 100644 index 000000000..0fd0ad696 --- /dev/null +++ b/packages/language-go/CONTRIBUTING.md @@ -0,0 +1 @@ +See the [Atom contributing guide](https://github.com/atom/atom/blob/master/CONTRIBUTING.md) diff --git a/packages/language-go/ISSUE_TEMPLATE.md b/packages/language-go/ISSUE_TEMPLATE.md new file mode 100644 index 000000000..b60bb86c9 --- /dev/null +++ b/packages/language-go/ISSUE_TEMPLATE.md @@ -0,0 +1,40 @@ +<!-- + +Have you read Atom's Code of Conduct? By filing an Issue, you are expected to comply with it, including treating everyone with respect: https://github.com/atom/atom/blob/master/CODE_OF_CONDUCT.md + +Do you want to ask a question? Are you looking for support? The Atom message board is the best place for getting support: https://discuss.atom.io + +--> + +### Prerequisites + +* [ ] Put an X between the brackets on this line if you have done all of the following: + * Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode + * Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/ + * Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq + * Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom + * Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages + +### Description + +[Description of the issue] + +### Steps to Reproduce + +1. [First Step] +2. [Second Step] +3. [and so on...] + +**Expected behavior:** [What you expect to happen] + +**Actual behavior:** [What actually happens] + +**Reproduces how often:** [What percentage of the time does it reproduce?] + +### Versions + +You can get this information from copy and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running. + +### Additional Information + +Any additional information, configuration or data that might be necessary to reproduce the issue. diff --git a/packages/language-go/LICENSE.md b/packages/language-go/LICENSE.md new file mode 100644 index 000000000..e19c32e5f --- /dev/null +++ b/packages/language-go/LICENSE.md @@ -0,0 +1,72 @@ +Copyright (c) 2014 GitHub Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +-------------------------------------------------------------------- + +This package was derived from a TextMate bundle located at +https://github.com/rsms/Go.tmbundle and distributed under the following +license, located in `LICENSE`: + +Copyright (c) 2009 Rasmus Andersson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +-------------------------------------------------------------------- + +The Go Template grammar was derived from GoSublime located at +https://github.com/DisposaBoy/GoSublime and distributed under the following +license, located in `LICENSE.md`: + +Copyright (c) 2012 The GoSublime Authors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/packages/language-go/PULL_REQUEST_TEMPLATE.md b/packages/language-go/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000..cdaa94a86 --- /dev/null +++ b/packages/language-go/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,28 @@ +### Requirements + +* Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion. +* All new code requires tests to ensure against regressions + +### Description of the Change + +<!-- + +We must be able to understand the design of your change from this description. If we can't get a good idea of what the code will be doing from the description here, the pull request may be closed at the maintainers' discretion. Keep in mind that the maintainer reviewing this PR may not be familiar with or have worked with the code here recently, so please walk us through the concepts. + +--> + +### Alternate Designs + +<!-- Explain what other alternates were considered and why the proposed version was selected --> + +### Benefits + +<!-- What benefits will be realized by the code change? --> + +### Possible Drawbacks + +<!-- What are the possible side-effects or negative impacts of the code change? --> + +### Applicable Issues + +<!-- Enter any applicable Issues here --> diff --git a/packages/language-go/README.md b/packages/language-go/README.md new file mode 100644 index 000000000..595a2a8b1 --- /dev/null +++ b/packages/language-go/README.md @@ -0,0 +1,8 @@ +# Go language support in Atom +![CI Status](https://github.com/atom/language-go/actions/workflows/main.yml/badge.svg) + +Adds syntax highlighting and snippets to Go files in Atom. + +Originally [converted](http://flight-manual.atom.io/hacking-atom/sections/converting-from-textmate) from the [Go TextMate bundle](https://github.com/rsms/Go.tmbundle). + +Contributions are greatly appreciated. Please fork this repository and open a pull request to add snippets, make grammar tweaks, etc. 
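The scopes this highlighting relies on are defined in `grammars/go.cson` below, with a separate tree-sitter grammar in `grammars/tree-sitter-go.cson`. As a rough illustration, a minimal sketch in the same style as the language-git spec earlier in this diff, assuming an Atom spec environment and that `grammarForScopeName("source.go")` resolves to the TextMate grammar:

```coffee
describe "Go function declarations", ->
  grammar = null

  beforeEach ->
    waitsForPromise ->
      atom.packages.activatePackage("language-go")

    runs ->
      grammar = atom.grammars.grammarForScopeName("source.go")

  it "scopes the func keyword and the declared function name", ->
    # grammars/go.cson tags 'func' as keyword.function.go and the declared
    # name as entity.name.function.go.
    {tokens} = grammar.tokenizeLine("func main() {")
    expect(tokens[0]).toEqual value: "func", scopes: ["source.go", "keyword.function.go"]
    expect(tokens[2]).toEqual value: "main", scopes: ["source.go", "entity.name.function.go"]
```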
diff --git a/packages/language-go/coffeelint.json b/packages/language-go/coffeelint.json new file mode 100644 index 000000000..a5dd715e3 --- /dev/null +++ b/packages/language-go/coffeelint.json @@ -0,0 +1,37 @@ +{ + "max_line_length": { + "level": "ignore" + }, + "no_empty_param_list": { + "level": "error" + }, + "arrow_spacing": { + "level": "error" + }, + "no_interpolation_in_single_quotes": { + "level": "error" + }, + "no_debugger": { + "level": "error" + }, + "prefer_english_operator": { + "level": "error" + }, + "colon_assignment_spacing": { + "spacing": { + "left": 0, + "right": 1 + }, + "level": "error" + }, + "braces_spacing": { + "spaces": 0, + "level": "error" + }, + "spacing_after_comma": { + "level": "error" + }, + "no_stand_alone_at": { + "level": "error" + } +} diff --git a/packages/language-go/grammars/go.cson b/packages/language-go/grammars/go.cson new file mode 100644 index 000000000..c1ae0a217 --- /dev/null +++ b/packages/language-go/grammars/go.cson @@ -0,0 +1,624 @@ +'scopeName': 'source.go' +'name': 'Go' +'comment': 'Go language' +'fileTypes': [ + 'go' +] +'foldingStartMarker': '({|\\()\\s*$' +'foldingStopMarker': '(}|\\))\\s*$' +'patterns': [ + { + 'include': '#comments' + } + { + 'comment': 'Interpreted string literals' + 'begin': '"' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.go' + 'end': '"' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.go' + 'name': 'string.quoted.double.go' + 'patterns': [ + { + 'include': '#string_escaped_char' + } + { + 'include': '#string_placeholder' + } + ] + } + { + 'comment': 'Raw string literals' + 'begin': '`' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.go' + 'end': '`' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.go' + 'name': 'string.quoted.raw.go', + 'patterns': [ + { + 'include': '#string_placeholder' + } + ] + } + { + 'comment': 'Syntax error receiving channels' + 'match': '<\\-([\\t ]+)chan\\b' + 'captures': + '1': + 'name': 'invalid.illegal.receive-channel.go' + } + { + 'comment': 'Syntax error sending channels' + 'match': '\\bchan([\\t ]+)<\-' + 'captures': + '1': + 'name': 'invalid.illegal.send-channel.go' + } + { + 'comment': 'Syntax error using slices' + 'match': '\\[\\](\\s+)' + 'captures': + '1': + 'name': 'invalid.illegal.slice.go' + } + { + 'comment': 'Syntax error numeric literals' + 'match': '\\b0[0-7]*[89]\\d*\\b' + 'name': 'invalid.illegal.numeric.go' + } + { + 'comment': 'Built-in functions' + 'match': '\\b(append|cap|close|complex|copy|delete|imag|len|make|new|panic|print|println|real|recover)\\b(?=\\()' + 'name': 'support.function.builtin.go' + } + { + 'comment': 'Function declarations' + 'match': '^(\\bfunc\\b)(?:\\s+(\\([^\\)]+\\)\\s+)?(\\w+)(?=\\())?' + 'captures': + '1': + 'name': 'keyword.function.go' + '2': + 'patterns': [ + { + 'include': '#brackets' + } + { + 'include': '#operators' + } + ] + '3': + 'patterns': [ + { + 'match': '\\d\\w*' + 'name': 'invalid.illegal.identifier.go' + } + { + 'match': '\\w+' + 'name': 'entity.name.function.go' + } + ] + } + { + 'comment': 'Functions' + 'match': '(\\bfunc\\b)|(\\w+)(?=\\()' + 'captures': + '1': + 'name': 'keyword.function.go' + '2': + 'patterns': [ + { + 'match': '\\d\\w*' + 'name': 'invalid.illegal.identifier.go' + } + { + 'match': '\\w+' + 'name': 'support.function.go' + } + ] + } + { + 'comment': 'Floating-point literals' + 'match': '(\\.\\d+([Ee][\-\+]\\d+)?i?)\\b|\\b\\d+\\.\\d*(([Ee][\-\+]\\d+)?i?\\b)?' 
+ 'name': 'constant.numeric.floating-point.go' + } + { + 'comment': 'Integers' + 'match': '\\b((0x[0-9a-fA-F]+)|(0[0-7]+i?)|(\\d+([Ee]\\d+)?i?)|(\\d+[Ee][\-\+]\\d+i?))\\b' + 'name': 'constant.numeric.integer.go' + } + { + 'comment': 'Language constants' + 'match': '\\b(true|false|nil|iota)\\b' + 'name': 'constant.language.go' + } + { + # Package declarations + # Using a begin/end here allows us to match the package keyword before the package name is typed + 'begin': '\\b(package)\\s+' + 'beginCaptures': + '1': + 'name': 'keyword.package.go' + 'end': '(?!\\G)' + 'patterns': [ + { + 'match': '\\d\\w*' + 'name': 'invalid.illegal.identifier.go' + } + { + 'match': '\\w+' + 'name': 'entity.name.package.go' + } + ] + } + { + # Type declarations + # Using a begin/end here allows us to match the type keyword before the type name is typed + 'begin': '\\b(type)\\s+' + 'beginCaptures': + '1': + 'name': 'keyword.type.go' + 'end': '(?!\\G)' + 'patterns': [ + { + 'match': '\\d\\w*' + 'name': 'invalid.illegal.identifier.go' + } + { + 'match': '\\w+' + 'name': 'entity.name.type.go' + } + ] + } + { + # Imports + 'begin': '\\b(import)\\s+' + 'beginCaptures': + '1': + 'name': 'keyword.import.go' + 'end': '(?!\\G)' + 'patterns': [ + { + 'include': '#imports' + } + ] + } + { + # Variables + 'begin': '\\b(var)\\s+' + 'beginCaptures': + '1': + 'name': 'keyword.var.go' + 'end': '(?!\\G)' + 'patterns': [ + { + 'include': '#variables' + } + ] + } + { + # Assignments to existing variables + # a = + # a, b, whatever = + # a.b, c.d = + 'match': '(?<!var)\\s*(\\w+(?:\\.\\w+)*(?>,\\s*\\w+(?:\\.\\w+)*)*)(?=\\s*=(?!=))' + 'captures': + '1': + 'patterns': [ + { + 'match': '\\d\\w*' + 'name': 'invalid.illegal.identifier.go' + } + { + 'match': '\\w+(?:\\.\\w+)*' + 'name': 'variable.other.assignment.go' + 'captures': + '0': + 'patterns': [ + { + 'include': '#delimiters' + } + ] + } + { + 'include': '#delimiters' + } + ] + } + { + # Shorthand variable declaration and assignments + # a := + # a, b := + 'match': '\\w+(?:,\\s*\\w+)*(?=\\s*:=)' + 'captures': + '0': + 'patterns': [ + { + 'match': '\\d\\w*' + 'name': 'invalid.illegal.identifier.go' + } + { + 'match': '\\w+' + 'name': 'variable.other.assignment.go' + } + { + 'include': '#delimiters' + } + ] + } + { + 'comment': 'Terminators' + 'match': ';' + 'name': 'punctuation.terminator.go' + } + { + 'include': '#brackets' + } + { + 'include': '#delimiters' + } + { + 'include': '#keywords' + } + { + 'include': '#operators' + } + { + 'include': '#runes' + } + { + 'include': '#storage_types' + } +] +'repository': + 'brackets': + 'patterns': [ + { + 'begin': '{' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.begin.bracket.curly.go' + 'end': '}' + 'endCaptures': + '0': + 'name': 'punctuation.definition.end.bracket.curly.go' + 'patterns': [ + { + 'include': '$self' + } + ] + } + { + 'begin': '\\(' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.begin.bracket.round.go' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.definition.end.bracket.round.go' + 'patterns': [ + { + 'include': '$self' + } + ] + } + { + 'match': '\\[|\\]' + 'name': 'punctuation.definition.bracket.square.go' + } + ] + 'comments': + 'patterns': [ + { + 'begin': '/\\*' + 'end': '\\*/' + 'captures': + '0': + 'name': 'punctuation.definition.comment.go' + 'name': 'comment.block.go' + } + { + 'begin': '//' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.comment.go' + 'end': '$' + 'name': 'comment.line.double-slash.go' + } + ] + 'delimiters': + 'patterns': [ + { + 'match': 
',' + 'name': 'punctuation.other.comma.go' + } + { + 'match': '\\.(?!\\.\\.)' + 'name': 'punctuation.other.period.go' + } + { + 'match': ':(?!=)' + 'name': 'punctuation.other.colon.go' + } + ] + 'imports': + 'patterns': [ + { + # Single line import declarations + 'match': '((?!\\s+")[^\\s]*)?\\s*((")([^"]*)("))' + 'captures': + '1': + 'name': 'entity.alias.import.go' + '2': + 'name': 'string.quoted.double.go' + '3': + 'name': 'punctuation.definition.string.begin.go' + '4': + 'name': 'entity.name.import.go' + '5': + 'name': 'punctuation.definition.string.end.go' + } + { + # Multiline import declarations + 'begin': '\\(' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.imports.begin.bracket.round.go' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.definition.imports.end.bracket.round.go' + 'patterns': [ + { + 'include': '#comments' + } + { + 'include': '#imports' + } + ] + } + ] + 'keywords': + 'patterns': [ + { + 'comment': 'Flow control keywords' + 'match': '\\b(break|case|continue|default|defer|else|fallthrough|for|go|goto|if|range|return|select|switch)\\b' + 'name': 'keyword.control.go' + } + { + 'match': '\\bchan\\b' + 'name': 'keyword.channel.go' + } + { + 'match': '\\bconst\\b' + 'name': 'keyword.const.go' + } + { + 'match': '\\bfunc\\b' + 'name': 'keyword.function.go' + } + { + 'match': '\\binterface\\b' + 'name': 'keyword.interface.go' + } + { + 'match': '\\bmap\\b' + 'name': 'keyword.map.go' + } + { + 'match': '\\bstruct\\b' + 'name': 'keyword.struct.go' + } + ] + 'operators': + 'comment': 'Note that the order here is very important!' + 'patterns': [ + { + 'match': '(\\*|&)(?=\\w)' + 'name': 'keyword.operator.address.go' + } + { + 'match': '<\\-' + 'name': 'keyword.operator.channel.go' + } + { + 'match': '\\-\\-' + 'name': 'keyword.operator.decrement.go' + } + { + 'match': '\\+\\+' + 'name': 'keyword.operator.increment.go' + } + { + 'match': '(==|!=|<=|>=|<(?!<)|>(?!>))' + 'name': 'keyword.operator.comparison.go' + } + { + 'match': '(&&|\\|\\||!)' + 'name': 'keyword.operator.logical.go' + } + { + 'match': '(=|\\+=|\\-=|\\|=|\\^=|\\*=|/=|:=|%=|<<=|>>=|&\\^=|&=)' + 'name': 'keyword.operator.assignment.go' + } + { + 'match': '(\\+|\\-|\\*|/|%)' + 'name': 'keyword.operator.arithmetic.go' + } + { + 'match': '(&(?!\\^)|\\||\\^|&\\^|<<|>>)' + 'name': 'keyword.operator.arithmetic.bitwise.go' + } + { + 'match': '\\.\\.\\.' 
+ 'name': 'keyword.operator.ellipsis.go' + } + ] + 'runes': + 'patterns': [ + { + 'begin': "'" + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.go' + 'end': "'" + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.go' + 'name': 'string.quoted.rune.go' + 'patterns': [ + { + 'match': "\\G(\\\\([0-7]{3}|[abfnrtv\\\\'\"]|x[0-9a-fA-F]{2}|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8})|.)(?=')" + 'name': 'constant.other.rune.go' + } + { + 'match': "[^']+" + 'name': 'invalid.illegal.unknown-rune.go' + } + ] + } + ] + 'storage_types': + 'patterns': [ + { + 'match': '\\bbool\\b' + 'name': 'storage.type.boolean.go' + } + { + 'match': '\\bbyte\\b' + 'name': 'storage.type.byte.go' + } + { + 'match': '\\berror\\b' + 'name': 'storage.type.error.go' + } + { + 'match': '\\b(complex(64|128)|float(32|64)|u?int(8|16|32|64)?)\\b' + 'name': 'storage.type.numeric.go' + } + { + 'match': '\\brune\\b' + 'name': 'storage.type.rune.go' + } + { + 'match': '\\bstring\\b' + 'name': 'storage.type.string.go' + } + { + 'match': '\\buintptr\\b' + 'name': 'storage.type.uintptr.go' + } + ] + 'string_escaped_char': + 'patterns': [ + { + 'match': '\\\\([0-7]{3}|[abfnrtv\\\\\'"]|x[0-9a-fA-F]{2}|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8})' + 'name': 'constant.character.escape.go' + } + { + 'match': '\\\\[^0-7xuUabfnrtv\\\'"]' + 'name': 'invalid.illegal.unknown-escape.go' + } + ] + 'string_placeholder': + 'patterns': [ + { + 'match': '%(\\[\\d+\\])?([\\+#\\-0\\x20]{,2}((\\d+|\\*)?(\\.?(\\d+|\\*|(\\[\\d+\\])\\*?)?(\\[\\d+\\])?)?))?[vT%tbcdoqxXUbeEfFgGsp]' + 'name': 'constant.other.placeholder.go' + } + ] + 'variables': + # First add tests and make sure existing tests still pass when changing anything here! + 'patterns': [ + { + # var a = + # var a string = + # var a, b = + # var a, b *c = + # var a string = + # var a b.c + # var a *b.c = + 'match': '(\\w+(?:,\\s*\\w+)*)(\\s+\\*?\\w+(?:\\.\\w+)?\\s*)?(?=\\s*=)' + 'captures': + '1': + 'patterns': [ + { + 'match': '\\d\\w*' + 'name': 'invalid.illegal.identifier.go' + } + { + 'match': '\\w+' + 'name': 'variable.other.assignment.go' + } + { + 'include': '#delimiters' + } + ] + '2': + 'patterns': [ + { + 'include': '$self' + } + ] + } + { + # var a + # var a string + # var a, b string + # var a []string + # var a [3]string + # var a [...]string + # var a [][]*string + # var a c.d + # var a []c.d + # var a <-chan string + 'match': '(\\w+(?:,\\s*\\w+)*)(\\s+(\\[(\\d*|\\.\\.\\.)\\])*\\*?(<-)?\\w+(?:\\.\\w+)?\\s*[^=].*)' + 'captures': + '1': + 'patterns': [ + { + 'match': '\\d\\w*' + 'name': 'invalid.illegal.identifier.go' + } + { + 'match': '\\w+' + 'name': 'variable.other.declaration.go' + } + { + 'include': '#delimiters' + } + ] + '2': + 'patterns': [ + { + 'include': '$self' + } + ] + } + { + # Multiline variable declarations/assignments + 'begin': '\\(' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.variables.begin.bracket.round.go' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.definition.variables.end.bracket.round.go' + 'patterns': [ + { + 'include': '$self' + } + { + 'include': '#variables' + } + ] + } + ] diff --git a/packages/language-go/grammars/gohtml.cson b/packages/language-go/grammars/gohtml.cson new file mode 100644 index 000000000..47d9d3c79 --- /dev/null +++ b/packages/language-go/grammars/gohtml.cson @@ -0,0 +1,13 @@ +'scopeName': 'text.html.gohtml' +'name': 'HTML (Go)' +'fileTypes': [ + 'gohtml' +] +'patterns': [ + { + 'include': 'text.html.basic' + } + { + 'include': 'source.gotemplate' + } +] diff --git 
a/packages/language-go/grammars/gomod.cson b/packages/language-go/grammars/gomod.cson new file mode 100644 index 000000000..756e77bb6 --- /dev/null +++ b/packages/language-go/grammars/gomod.cson @@ -0,0 +1,55 @@ +'scopeName': 'source.mod' +'name': 'Go Module File' +'fileTypes': [ + 'mod' +] +'patterns': [ + { + 'comment': 'Module keyword' + 'match': '\\bmodule\\b' + 'name': 'keyword.module.module' + } + { + 'comment': 'Require keyword' + 'match': '\\brequire\\b' + 'name': 'keyword.module.require' + } + { + 'comment': 'Replace keyword' + 'match': '\\breplace\\b' + 'name': 'keyword.module.replace' + } + { + 'comment': 'Exclude keyword' + 'match': '\\bexclude\\b' + 'name': 'keyword.module.exclude' + } + { + 'comment': 'Version string' + 'match': 'v(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:\\-(\\d{4}(?:0[1-9]|1[0-2])(?:0[1-9]|[1-2]\\d|3[0-1])(?:[0-1]\\d|2[0-3])(?:[0-5]\\d)(?:[0-5]\\d)))?(?:\\-([a-zA-Z\\d]{12}))?(?:\\+(incompatible))?' + 'name': 'string.unquoted.version.go' + } + { + 'begin': '\\(' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.begin.bracket.round.go' + 'end': '\\)' + 'endCaptures': + '0': + 'name': 'punctuation.definition.end.bracket.round.go' + 'patterns': [ + { + 'include': '$self' + } + ] + } + { + 'begin': '//' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.comment.go' + 'end': '$' + 'name': 'comment.line.double-slash.go' + } +] diff --git a/packages/language-go/grammars/gosum.cson b/packages/language-go/grammars/gosum.cson new file mode 100644 index 000000000..5e474da79 --- /dev/null +++ b/packages/language-go/grammars/gosum.cson @@ -0,0 +1,17 @@ +'scopeName': 'source.sum' +'name': 'Go Checksum File' +'fileTypes': [ + 'sum' +] +'patterns': [ + { + 'comment': 'Version string' + 'match': 'v(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:\\-(\\d{4}(?:0[1-9]|1[0-2])(?:0[1-9]|[1-2]\\d|3[0-1])(?:[0-1]\\d|2[0-3])(?:[0-5]\\d)(?:[0-5]\\d)))?(?:\\-([a-zA-Z\\d]{12}))?(?:\\+(incompatible))?(\\/go\\.mod)?' 
+ 'name': 'string.unquoted.version.gosum' + } + { + 'comment': 'Checksum' + 'match': 'h1:[a-zA-Z\\d+\\/]{43}=' + 'name': 'string.unquoted.checksum.gosum' + } +] diff --git a/packages/language-go/grammars/gotemplate.cson b/packages/language-go/grammars/gotemplate.cson new file mode 100644 index 000000000..4bb207c77 --- /dev/null +++ b/packages/language-go/grammars/gotemplate.cson @@ -0,0 +1,101 @@ +'scopeName': 'source.gotemplate' +'name': 'Go Template' +'foldingStartMarker': '\\{\\{\\s*(?:if|with|range)\\b' +'foldingStopMarker': '\\{\\{\\s*(?:else|end)\\b' +'patterns': [ + { + 'begin': '\\{\\{' + 'beginCaptures': + '0': + 'name': 'punctuation.section.embedded.begin.gotemplate' + 'end': '\\}\\}' + 'endCaptures': + '0': + 'name': 'punctuation.section.embedded.end.gotemplate' + 'patterns': [ + { + 'name': 'keyword.operator.initialize.gotemplate' + 'match': ':=' + } + { + 'name': 'keyword.operator.pipe.gotemplate' + 'match': '\\|' + } + { + 'name': 'variable.other.gotemplate' + 'match': '[.$][\\w]*' + } + { + 'name': 'keyword.control.gotemplate' + 'match': '\\b(if|else|range|template|with|end|nil|with|define)\\b' + } + { + 'name': 'support.function.builtin.gotemplate' + 'match': '\\b(and|call|html|index|js|len|not|or|print|printf|println|urlquery|eq|ne|lt|le|gt|ge)\\b' + } + { + 'begin': '/\\*' + 'end': '\\*/' + 'name': 'comment.block.gotemplate' + } + { + 'begin': '"' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.gotemplate' + 'end': '"' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.gotemplate' + 'name': 'string.quoted.double.gotemplate' + 'patterns': [ + { + 'include': '#string_placeholder' + } + { + 'include': '#string_escaped_char' + } + ] + } + { + 'begin': '`' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.gotemplate' + 'end': '`' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.gotemplate' + 'name': 'string.quoted.raw.gotemplate' + 'patterns': [ + { + 'include': '#string_placeholder' + } + ] + } + ] + } +] +'repository': + 'string_escaped_char': + 'patterns': [ + { + 'name': 'constant.character.escape.gotemplate' + 'match': '\\\\(\\\\|[abfnrtv\'"]|x[0-9a-fA-F]{2}|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}|[0-7]{3})' + } + { + 'name': 'invalid.illegal.unknown-escape.gotemplate' + 'match': '\\\\.' + } + ] + 'string_placeholder': + 'patterns': [ + { + 'name': 'constant.other.placeholder.gotemplate' + 'match': '(?x)%\n (\\d+\\$)? # field (argument #)\n [#0\\- +\']* # flags\n [,;:_]? # separator character (AltiVec)\n ((-?\\d+)|\\*(-?\\d+\\$)?)? # minimum field width\n (\\.((-?\\d+)|\\*(-?\\d+\\$)?)?)? 
# precision\n [diouxXDOUeEfFgGaAcCsSqpnvtTbyYhHmMzZ%] # conversion type\n ' + } + { + 'name': 'invalid.illegal.placeholder.gotemplate' + 'match': '%' + } + ] diff --git a/packages/language-go/grammars/tree-sitter-go.cson b/packages/language-go/grammars/tree-sitter-go.cson new file mode 100644 index 000000000..533dd206d --- /dev/null +++ b/packages/language-go/grammars/tree-sitter-go.cson @@ -0,0 +1,130 @@ +name: 'Go' +scopeName: 'source.go' +type: 'tree-sitter' +parser: 'tree-sitter-go' + +fileTypes: [ + 'go' +] + +comments: + start: '// ' + +folds: [ + { + type: ['comment', 'raw_string_literal'] + } + { + start: {index: 0, type: '{'} + end: {index: -1, type: '}'} + } + { + start: {index: 0, type: '['} + end: {index: -1, type: ']'} + } + { + start: {index: 0, type: '('} + end: {index: -1, type: ')'} + } + { + type: [ + 'type_switch_statement', + 'type_case_clause', + 'expression_switch_statement' + 'expression_case_clause', + 'select_statement', + 'communication_clause' + ] + start: {index: 0} + end: {index: -1} + } +] + +scopes: + 'source_file': 'source.go' + + 'comment': 'comment.block' + + '"var"': 'keyword.import' + '"type"': 'keyword.type' + '"func"': 'keyword.function' + '"const"': 'keyword.const' + '"struct"': 'keyword.struct' + '"interface"': 'keyword.interface' + '"import"': 'keyword.import' + '"package"': 'keyword.package' + '"map"': 'keyword.map' + '"chan"': 'keyword.chan' + + 'type_identifier': 'support.storage.type' + 'field_identifier': 'variable.other.object.property' + 'package_identifier': 'entity.name.package' + + '"if"': 'keyword.control' + '"for"': 'keyword.control' + '"else"': 'keyword.control' + '"case"': 'keyword.control' + '"break"': 'keyword.control' + '"switch"': 'keyword.control' + '"select"': 'keyword.control' + '"return"': 'keyword.control' + '"default"': 'keyword.control' + '"continue"': 'keyword.control' + '"goto"': 'keyword.control' + '"fallthrough"': 'keyword.control' + '"defer"': 'keyword.control' + '"range"': 'keyword.control' + '"go"': 'keyword.control' + + 'interpreted_string_literal': 'string.quoted.double' + 'raw_string_literal': 'string.quoted.double' + 'escape_sequence': 'constant.character.escape' + 'rune_literal': 'constant.other.rune' + 'int_literal': 'constant.numeric.integer' + 'float_literal': 'constant.numeric.float' + 'imaginary_literal': 'constant.numeric.integer' + 'nil': 'constant.language.nil' + 'false': 'constant.language.false' + 'true': 'constant.language.true' + + 'call_expression > identifier': 'entity.name.function' + 'function_declaration > identifier': 'entity.name.function' + 'method_declaration > field_identifier': 'entity.name.function' + 'call_expression > selector_expression > field_identifier': 'entity.name.function' + + '"+"': 'keyword.operator' + '"-"': 'keyword.operator' + '"*"': 'keyword.operator' + '"/"': 'keyword.operator' + '"%"': 'keyword.operator' + '"++"': 'keyword.operator' + '"--"': 'keyword.operator' + '"=="': 'keyword.operator' + '"!="': 'keyword.operator' + '">"': 'keyword.operator' + '"<"': 'keyword.operator' + '">="': 'keyword.operator' + '"<="': 'keyword.operator' + '"!"': 'keyword.operator' + '"|"': 'keyword.operator' + '"^"': 'keyword.operator' + '"<<"': 'keyword.operator' + '">>"': 'keyword.operator' + '"="': 'keyword.operator' + '"+="': 'keyword.operator' + '"-="': 'keyword.operator' + '"*="': 'keyword.operator' + '"/="': 'keyword.operator' + '"%="': 'keyword.operator' + '"<<="': 'keyword.operator' + '">>="': 'keyword.operator' + '"&="': 'keyword.operator' + '"^="': 'keyword.operator' + '"|="': 
'keyword.operator' + '":="': 'keyword.operator' + '"&"': 'keyword.operator' + '"*"': 'keyword.operator' + '"&&"': 'keyword.operator' + '"||"': 'keyword.operator' + '"..."': 'keyword.operator' + '"<-"': 'keyword.operator' diff --git a/packages/language-go/package-lock.json b/packages/language-go/package-lock.json new file mode 100644 index 000000000..36feeaabd --- /dev/null +++ b/packages/language-go/package-lock.json @@ -0,0 +1,380 @@ +{ + "name": "language-go", + "version": "0.47.3", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "version": "0.47.2", + "license": "MIT", + "dependencies": { + "tree-sitter-go": "0.19.1" + }, + "devDependencies": { + "coffeelint": "^1.10.1" + }, + "engines": { + "atom": "*", + "node": "*" + } + }, + "node_modules/balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/coffee-script": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.11.1.tgz", + "integrity": "sha1-vxxHrWREOg2V0S3ysUfMCk2q1uk=", + "deprecated": "CoffeeScript on NPM has moved to \"coffeescript\" (no hyphen)", + "dev": true, + "bin": { + "cake": "bin/cake", + "coffee": "bin/coffee" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/coffeelint": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/coffeelint/-/coffeelint-1.16.2.tgz", + "integrity": "sha512-6mzgOo4zb17WfdrSui/cSUEgQ0AQkW3gXDht+6lHkfkqGUtSYKwGdGcXsDfAyuScVzTlTtKdfwkAlJWfqul7zg==", + "dev": true, + "dependencies": { + "coffee-script": "~1.11.0", + "glob": "^7.0.6", + "ignore": "^3.0.9", + "optimist": "^0.6.1", + "resolve": "^0.6.3", + "strip-json-comments": "^1.0.2" + }, + "bin": { + "coffeelint": "bin/coffeelint" + }, + "engines": { + "node": ">=0.8.0", + "npm": ">=1.3.7" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "node_modules/glob": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", + "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ignore": { + "version": "3.3.10", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", + "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==", + "dev": true + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": 
"sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", + "dev": true + }, + "node_modules/minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "0.0.10", + "resolved": "http://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", + "integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=", + "dev": true + }, + "node_modules/nan": { + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", + "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/optimist": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", + "dev": true, + "dependencies": { + "minimist": "~0.0.1", + "wordwrap": "~0.0.2" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-0.6.3.tgz", + "integrity": "sha1-3ZV5gufnNt699TtYpN2RdUV13UY=", + "dev": true + }, + "node_modules/strip-json-comments": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-1.0.4.tgz", + "integrity": "sha1-HhX7ysl9Pumb8tc7TGVrCCu6+5E=", + "dev": true, + "bin": { + "strip-json-comments": "cli.js" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/tree-sitter-go": { + "version": "0.19.1", + "resolved": "https://registry.npmjs.org/tree-sitter-go/-/tree-sitter-go-0.19.1.tgz", + "integrity": "sha512-qAHzfuddhbs3kPIW2vMBL5TqC5umhQ3NlDBPtdqlxE6tN2PKWHj0SZxXd/YrvqNUDrMmRpPBRg9W7JyCP/+n3A==", + "hasInstallScript": true, + "dependencies": { + "nan": "^2.14.0" + } + }, + "node_modules/wordwrap": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + } + }, + "dependencies": { + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": 
"sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "coffee-script": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.11.1.tgz", + "integrity": "sha1-vxxHrWREOg2V0S3ysUfMCk2q1uk=", + "dev": true + }, + "coffeelint": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/coffeelint/-/coffeelint-1.16.2.tgz", + "integrity": "sha512-6mzgOo4zb17WfdrSui/cSUEgQ0AQkW3gXDht+6lHkfkqGUtSYKwGdGcXsDfAyuScVzTlTtKdfwkAlJWfqul7zg==", + "dev": true, + "requires": { + "coffee-script": "~1.11.0", + "glob": "^7.0.6", + "ignore": "^3.0.9", + "optimist": "^0.6.1", + "resolve": "^0.6.3", + "strip-json-comments": "^1.0.2" + } + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "glob": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", + "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "ignore": { + "version": "3.3.10", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", + "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", + "dev": true + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "0.0.10", + "resolved": "http://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", + "integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=", + "dev": true + }, + "nan": { + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", + "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "optimist": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", + "dev": true, + "requires": { + "minimist": "~0.0.1", + "wordwrap": "~0.0.2" + } + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "resolve": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-0.6.3.tgz", + "integrity": "sha1-3ZV5gufnNt699TtYpN2RdUV13UY=", + "dev": true + }, + "strip-json-comments": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-1.0.4.tgz", + "integrity": "sha1-HhX7ysl9Pumb8tc7TGVrCCu6+5E=", + "dev": true + }, + "tree-sitter-go": { + "version": "0.19.1", + "resolved": "https://registry.npmjs.org/tree-sitter-go/-/tree-sitter-go-0.19.1.tgz", + "integrity": "sha512-qAHzfuddhbs3kPIW2vMBL5TqC5umhQ3NlDBPtdqlxE6tN2PKWHj0SZxXd/YrvqNUDrMmRpPBRg9W7JyCP/+n3A==", + "requires": { + "nan": "^2.14.0" + } + }, + "wordwrap": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", + "dev": true + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + } + } +} diff --git a/packages/language-go/package.json b/packages/language-go/package.json new file mode 100644 index 000000000..d3db96153 --- /dev/null +++ b/packages/language-go/package.json @@ -0,0 +1,27 @@ +{ + "name": "language-go", + "description": "Go language support in Atom", + "keywords": [ + "tree-sitter" + ], + "version": "0.47.3", + "license": "MIT", + "engines": { + "atom": "*", + "node": "*" + }, + "homepage": "http://atom.github.io/language-go", + "repository": { + "type": "git", + "url": "https://github.com/atom/language-go.git" + }, + "bugs": { + "url": "https://github.com/atom/language-go/issues" + }, + "dependencies": { + "tree-sitter-go": "0.19.1" + }, + "devDependencies": { + "coffeelint": "^1.10.1" + } +} diff --git a/packages/language-go/settings/language-go.cson b/packages/language-go/settings/language-go.cson new file mode 100644 index 000000000..f6ebb10ca --- /dev/null +++ b/packages/language-go/settings/language-go.cson @@ -0,0 +1,6 @@ +'.source.go': + 'editor': + 'commentStart': '// ' + 'increaseIndentPattern': '^.*(\\bcase\\b.*:|\\bdefault\\b:|(\\b(func|if|else|switch|select|for|struct)\\b.*)?{[^}]*|\\([^)]*)$' + 'decreaseIndentPattern': '^\\s*(\\bcase\\b.*:|\\bdefault\\b:|}[),]?|\\)[,]?)$' + 'decreaseNextIndentPattern': '^\\s*[^\\s()}]+(?<m>[^()]*\\((?:\\g<m>[^()]*|[^()]*)\\))*[^()]*\\)[,]?$' diff --git a/packages/language-go/snippets/language-go.cson b/packages/language-go/snippets/language-go.cson new file mode 100644 index 000000000..dec06ddb8 --- /dev/null +++ b/packages/language-go/snippets/language-go.cson @@ -0,0 +1,140 @@ +'.source.go': + 'package clause': + 'prefix': 'pkg' + 'body': "package ${1:name}" + 'single import': + 'prefix': 'im' + 'body': 'import "${1:package}"' + 'multiple imports': + 'prefix': 'ims' + 'body': "import (\n\t\"${1:package}\"\n)" + 'single constant': + 'prefix': 'co' + 'body': "const ${1:name} = ${2:value}" + 'multiple constants': + 'prefix': 'cos' + 'body': "const (\n\t${1:name} = ${2:value}\n)" + 'type interface declaration': + 'prefix': 'tyi' + 'body': "type ${1:name} interface {\n\t$0\n}" + 'type struct declaration': + 'prefix': 'tys' + 'body': "type ${1:name} struct {\n\t$0\n}" + 'type function declaration': + 'prefix': 'tyf' + 'body': 'type ${1:name} func(${2:commonParam}) ${3:returnParam}' + 'main package': + 'prefix': 'pkgm' + 'body': "package 
main\n\nfunc main() {\n\t$0\n}" + 'function declaration': + 'prefix': 'func' + 'body': "func $1($2) $3 {\n\t$0\n}" + 'variable declaration': + 'prefix': 'var' + 'body': "var ${1:name} ${2:type}" + 'switch statement': + 'prefix': 'switch' + 'body': "switch ${1:expression} {\ncase ${2:condition}:\n\t$0\n}" + 'case clause': + 'prefix': 'cs' + 'body': "case ${1:condition}:$0" + 'for statement': + 'prefix': 'for' + 'body': "for ${1:index} := 0; $1 < ${2:count}; $1${3:++} {\n\t$0\n}" + 'for range statement': + 'prefix': 'forr' + 'body': "for ${1:var} := range ${2:var} {\n\t$0\n}" + 'channel declaration': + 'prefix': 'ch' + 'body': "chan ${1:type}" + 'map declaration': + 'prefix': 'map' + 'body': "map[${1:type}]${2:type}" + 'empty interface': + 'prefix': 'in' + 'body': "interface{}" + 'if statement': + 'prefix': 'if' + 'body': "if ${1:condition} {\n\t$0\n}" + 'else branch': + 'prefix': 'el' + 'body': "else {\n\t$0\n}" + 'if else statement': + 'prefix': 'ie' + 'body': "if ${1:condition} {\n\t$2\n} else {\n\t$0\n}" + 'if err != nil': + 'prefix': 'iferr' + 'body': "if err != nil {\n\t${1:return}\n}" + 'fmt.Println': + 'prefix': 'fp' + 'body': "fmt.Println(\"$1\")" + 'fmt.Printf': + 'prefix': 'ff' + 'body': "fmt.Printf(\"$1\", ${2:var})" + 'log.Println': + 'prefix': 'lp' + 'body': "log.Println(\"$1\")" + 'log.Printf': + 'prefix': 'lf' + 'body': "log.Printf(\"$1\", ${2:var})" + 'log variable content': + 'prefix': 'lv' + 'body': "log.Printf(\"${1:var}: %#+v\\\\n\", ${1:var})" + 'make(...)': + 'prefix': 'make' + 'body': "make(${1:type}, ${2:0})" + 'new(...)': + 'prefix': 'new' + 'body': "new(${1:type})" + 'panic(...)': + 'prefix': 'pn' + 'body': "panic(\"$0\")" + 'http ResponseWriter *Request': + 'prefix': 'wr' + 'body': "${1:w} http.ResponseWriter, ${2:r} *http.Request" + 'http Context ResponseWriter *Request': + 'prefix': 'cwr' + 'body': "${1:c} context.Context, ${2:w} http.ResponseWriter, ${3:r} *http.Request" + 'http.HandleFunc': + 'prefix': 'hf' + 'body': "${1:http}.HandleFunc(\"${2:/}\", ${3:handler})" + 'http handler declaration': + 'prefix': 'hand' + 'body': "func $1(${2:w} http.ResponseWriter, ${3:r} *http.Request) {\n\t$0\n}" + 'http.Redirect': + 'prefix': 'rd' + 'body': "http.Redirect(${1:w}, ${2:r}, \"${3:/}\", ${4:http.StatusFound})" + 'http.Error': + 'prefix': 'herr' + 'body': "http.Error(${1:w}, ${2:err}.Error(), ${3:http.StatusInternalServerError})" + 'http.ListenAndServe': + 'prefix': 'las' + 'body': "http.ListenAndServe(\"${1::8080}\", ${2:nil})" + 'http.Serve': + 'prefix': 'sv' + 'body': "http.Serve(\"${1::8080}\", ${2:nil})" + 'goroutine anonymous function': + 'prefix': 'go' + 'body': 'go func($1) {\n\t$2\n}($0)' + 'goroutine function': + 'prefix': 'gf' + 'body': 'go ${1:func}($0)' + 'defer statement': + 'prefix': 'df' + 'body': "defer ${1:func}($0)" + 'test function': + 'prefix': 'tf' + 'body': "func Test$1(t *testing.T) {\n\t$0\n}" + 'go template': + 'prefix': 'got' + 'body': """ + package ${1:main} + + import ( + "${2:fmt}" + ) + + func ${1:main}() { + $3 + } + """ diff --git a/packages/language-go/spec/go-spec.coffee b/packages/language-go/spec/go-spec.coffee new file mode 100644 index 000000000..0341ce390 --- /dev/null +++ b/packages/language-go/spec/go-spec.coffee @@ -0,0 +1,977 @@ +describe 'Go grammar', -> + grammar = null + + beforeEach -> + atom.config.set('core.useTreeSitterParsers', false) + + waitsForPromise -> + atom.packages.activatePackage('language-go') + + runs -> + grammar = atom.grammars.grammarForScopeName('source.go') + + it 'parses the grammar', -> + 
expect(grammar).toBeTruthy() + expect(grammar.scopeName).toBe 'source.go' + + it 'tokenizes comments', -> + {tokens} = grammar.tokenizeLine('// I am a comment') + expect(tokens[0].value).toEqual '//' + expect(tokens[0].scopes).toEqual ['source.go', 'comment.line.double-slash.go', 'punctuation.definition.comment.go'] + expect(tokens[1].value).toEqual ' I am a comment' + expect(tokens[1].scopes).toEqual ['source.go', 'comment.line.double-slash.go'] + + tokens = grammar.tokenizeLines('/*\nI am a comment\n*/') + expect(tokens[0][0].value).toEqual '/*' + expect(tokens[0][0].scopes).toEqual ['source.go', 'comment.block.go', 'punctuation.definition.comment.go'] + expect(tokens[1][0].value).toEqual 'I am a comment' + expect(tokens[1][0].scopes).toEqual ['source.go', 'comment.block.go'] + expect(tokens[2][0].value).toEqual '*/' + expect(tokens[2][0].scopes).toEqual ['source.go', 'comment.block.go', 'punctuation.definition.comment.go'] + + it 'tokenizes comments in imports', -> + lines = grammar.tokenizeLines ''' + import ( + //"fmt" + "os" // comment + // comment! + ) + ''' + expect(lines[1][1]).toEqual value: '//', scopes: ['source.go', 'comment.line.double-slash.go', 'punctuation.definition.comment.go'] + expect(lines[2][5]).toEqual value: '//', scopes: ['source.go', 'comment.line.double-slash.go', 'punctuation.definition.comment.go'] + expect(lines[3][1]).toEqual value: '//', scopes: ['source.go', 'comment.line.double-slash.go', 'punctuation.definition.comment.go'] + + it 'tokenizes strings', -> + delims = + 'string.quoted.double.go': '"' + 'string.quoted.raw.go': '`' + + for scope, delim of delims + {tokens} = grammar.tokenizeLine(delim + 'I am a string' + delim) + expect(tokens[0].value).toEqual delim + expect(tokens[0].scopes).toEqual ['source.go', scope, 'punctuation.definition.string.begin.go'] + expect(tokens[1].value).toEqual 'I am a string' + expect(tokens[1].scopes).toEqual ['source.go', scope] + expect(tokens[2].value).toEqual delim + expect(tokens[2].scopes).toEqual ['source.go', scope, 'punctuation.definition.string.end.go'] + + it 'tokenizes placeholders in strings', -> + # Taken from go/src/pkg/fmt/fmt_test.go + verbs = [ + '%# x', '%-5s', '%5s', '%05s', '%.5s', '%10.1q', '%10v', '%-10v', '%.0d' + '%.d', '%+07.2f', '%0100d', '%0.100f', '%#064x', '%+.3F', '%-#20.8x', + '%[1]d', '%[2]*[1]d', '%[3]*.[2]*[1]f', '%[3]*.[2]f', '%3.[2]d', '%.[2]d' + '%-+[1]x', '%d', '%-d', '%+d', '%#d', '% d', '%0d', '%1.2d', '%-1.2d' + '%+1.2d', '%-+1.2d', '%*d', '%.*d', '%*.*d', '%0*d', '%-*d' + ] + + for verb in verbs + {tokens} = grammar.tokenizeLine('"' + verb + '"') + expect(tokens[0].value).toEqual '"', + expect(tokens[0].scopes).toEqual ['source.go', 'string.quoted.double.go', 'punctuation.definition.string.begin.go'] + expect(tokens[1].value).toEqual verb + expect(tokens[1].scopes).toEqual ['source.go', 'string.quoted.double.go', 'constant.other.placeholder.go'] + expect(tokens[2].value).toEqual '"', + expect(tokens[2].scopes).toEqual ['source.go', 'string.quoted.double.go', 'punctuation.definition.string.end.go'] + + it 'tokenizes character escapes in strings', -> + escapes = [ + '\\a', '\\b', '\\f', '\\n', '\\r', '\\t', '\\v', '\\\\' + '\\000', '\\007', '\\377', '\\x07', '\\xff', '\\u12e4', '\\U00101234' + ] + + for escape in escapes + {tokens} = grammar.tokenizeLine('"' + escape + '"') + expect(tokens[1].value).toEqual escape + expect(tokens[1].scopes).toEqual ['source.go', 'string.quoted.double.go', 'constant.character.escape.go'] + + {tokens} = grammar.tokenizeLine('"\\""') + 
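+    # Descriptive note (editorial): the escaped quote \" inside a double-quoted
+    # literal should surface as one constant.character.escape token rather than
+    # terminating the string, which is what the assertions below check.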
expect(tokens[1].value).toEqual '\\"' + expect(tokens[1].scopes).toEqual ['source.go', 'string.quoted.double.go', 'constant.character.escape.go'] + + it 'tokenizes placeholders in raw strings', -> + # Taken from go/src/pkg/fmt/fmt_test.go + verbs = [ + '%# x', '%-5s', '%5s', '%05s', '%.5s', '%10.1q', '%10v', '%-10v', '%.0d' + '%.d', '%+07.2f', '%0100d', '%0.100f', '%#064x', '%+.3F', '%-#20.8x', + '%[1]d', '%[2]*[1]d', '%[3]*.[2]*[1]f', '%[3]*.[2]f', '%3.[2]d', '%.[2]d' + '%-+[1]x', '%d', '%-d', '%+d', '%#d', '% d', '%0d', '%1.2d', '%-1.2d' + '%+1.2d', '%-+1.2d', '%*d', '%.*d', '%*.*d', '%0*d', '%-*d' + ] + + for verb in verbs + {tokens} = grammar.tokenizeLine('`' + verb + '`') + expect(tokens[0].value).toEqual '`', + expect(tokens[0].scopes).toEqual ['source.go', 'string.quoted.raw.go', 'punctuation.definition.string.begin.go'] + expect(tokens[1].value).toEqual verb + expect(tokens[1].scopes).toEqual ['source.go', 'string.quoted.raw.go', 'constant.other.placeholder.go'] + expect(tokens[2].value).toEqual '`', + expect(tokens[2].scopes).toEqual ['source.go', 'string.quoted.raw.go', 'punctuation.definition.string.end.go'] + + it 'tokenizes runes', -> + runes = [ + 'u', 'X', '$', ':', '(', '.', '2', '=', '!', '@', + '\\a', '\\b', '\\f', '\\n', '\\r', '\\t', '\\v', '\\\\', "\\'", '\\"', + '\\000', '\\007', '\\377', '\\x07', '\\xff', '\\u12e4', '\\U00101234' + ] + + for rune in runes + {tokens} = grammar.tokenizeLine("'#{rune}'") + expect(tokens[0]).toEqual value: "'", scopes: ['source.go', 'string.quoted.rune.go', 'punctuation.definition.string.begin.go'] + expect(tokens[1]).toEqual value: rune, scopes: ['source.go', 'string.quoted.rune.go', 'constant.other.rune.go'] + expect(tokens[2]).toEqual value: "'", scopes: ['source.go', 'string.quoted.rune.go', 'punctuation.definition.string.end.go'] + + it 'tokenizes invalid runes and single quoted strings', -> + {tokens} = grammar.tokenizeLine("'\\c'") + expect(tokens[0]).toEqual value: "'", scopes: ['source.go', 'string.quoted.rune.go', 'punctuation.definition.string.begin.go'] + expect(tokens[1]).toEqual value: '\\c', scopes: ['source.go', 'string.quoted.rune.go', 'invalid.illegal.unknown-rune.go'] + expect(tokens[2]).toEqual value: "'", scopes: ['source.go', 'string.quoted.rune.go', 'punctuation.definition.string.end.go'] + + {tokens} = grammar.tokenizeLine("'ab'") + expect(tokens[0]).toEqual value: "'", scopes: ['source.go', 'string.quoted.rune.go', 'punctuation.definition.string.begin.go'] + expect(tokens[1]).toEqual value: 'ab', scopes: ['source.go', 'string.quoted.rune.go', 'invalid.illegal.unknown-rune.go'] + expect(tokens[2]).toEqual value: "'", scopes: ['source.go', 'string.quoted.rune.go', 'punctuation.definition.string.end.go'] + + {tokens} = grammar.tokenizeLine("'some single quote string'") + expect(tokens[0]).toEqual value: "'", scopes: ['source.go', 'string.quoted.rune.go', 'punctuation.definition.string.begin.go'] + expect(tokens[1]).toEqual value: 'some single quote string', scopes: ['source.go', 'string.quoted.rune.go', 'invalid.illegal.unknown-rune.go'] + expect(tokens[2]).toEqual value: "'", scopes: ['source.go', 'string.quoted.rune.go', 'punctuation.definition.string.end.go'] + + it 'tokenizes invalid whitespace around chan annotations', -> + invalid_send = + 'chan <- sendonly': ' ' + + invalid_receive = + '<- chan recvonly': ' ' + + for expr, invalid of invalid_send + {tokens} = grammar.tokenizeLine(expr) + expect(tokens[1].value).toEqual invalid + expect(tokens[1].scopes).toEqual ['source.go', 'invalid.illegal.send-channel.go'] 
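+    # Descriptive note (editorial): gofmt writes directional channel types with
+    # no interior whitespace (`chan<- int`, `<-chan int`); the spaced forms
+    # exercised above are therefore scoped invalid.illegal by this grammar.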
+ + for expr, invalid of invalid_receive + {tokens} = grammar.tokenizeLine(expr) + expect(tokens[1].value).toEqual invalid + expect(tokens[1].scopes).toEqual ['source.go', 'invalid.illegal.receive-channel.go'] + + it 'tokenizes keywords', -> + keywordLists = + 'keyword.control.go': ['break', 'case', 'continue', 'default', 'defer', 'else', 'fallthrough', 'for', 'go', 'goto', 'if', 'range', 'return', 'select', 'switch'] + 'keyword.channel.go': ['chan'] + 'keyword.const.go': ['const'] + 'keyword.function.go': ['func'] + 'keyword.interface.go': ['interface'] + 'keyword.import.go': ['import'] + 'keyword.map.go': ['map'] + 'keyword.package.go': ['package'] + 'keyword.struct.go': ['struct'] + 'keyword.type.go': ['type'] + 'keyword.var.go': ['var'] + + for scope, list of keywordLists + for keyword in list + {tokens} = grammar.tokenizeLine keyword + expect(tokens[0].value).toEqual keyword + expect(tokens[0].scopes).toEqual ['source.go', scope] + + it 'tokenizes storage types', -> + storageTypes = + 'storage.type.boolean.go': ['bool'] + 'storage.type.byte.go': ['byte'] + 'storage.type.error.go': ['error'] + 'storage.type.numeric.go': ['int', 'int8', 'int16', 'int32', 'int64', 'uint', 'uint8', 'uint16', 'uint32', 'uint64', 'float32', 'float64', 'complex64', 'complex128'] + 'storage.type.rune.go': ['rune'] + 'storage.type.string.go': ['string'] + 'storage.type.uintptr.go': ['uintptr'] + + for scope, types of storageTypes + for type in types + {tokens} = grammar.tokenizeLine type + expect(tokens[0].value).toEqual type + expect(tokens[0].scopes).toEqual ['source.go', scope] + + it 'tokenizes func regardless of the context', -> + funcKeyword = ['func f()', 'func (x) f()', 'func(x) f()', 'func'] + for line in funcKeyword + {tokens} = grammar.tokenizeLine line + expect(tokens[0].value).toEqual 'func' + expect(tokens[0].scopes).toEqual ['source.go', 'keyword.function.go'] + + funcType = [ + { + 'line': 'var f1 func(' + 'tokenPos': 4 + } + { + 'line': 'f2 :=func()' + 'tokenPos': 3 + } + { + 'line': '\tfunc(' + 'tokenPos': 1 + } + { + 'line': 'type HandlerFunc func(' + 'tokenPos': 4 + } + ] + for t in funcType + {tokens} = grammar.tokenizeLine t.line + relevantToken = tokens[t.tokenPos] + expect(relevantToken.value).toEqual 'func' + expect(relevantToken.scopes).toEqual ['source.go', 'keyword.function.go'] + + next = tokens[t.tokenPos + 1] + expect(next.value).toEqual '(' + expect(next.scopes).toEqual ['source.go', 'punctuation.definition.begin.bracket.round.go'] + + it 'only tokenizes func when it is an exact match', -> + tests = ['myfunc', 'funcMap'] + for test in tests + {tokens} = grammar.tokenizeLine test + expect(tokens[0].value).not.toEqual 'func' + expect(tokens[0].scopes).not.toEqual ['source.go', 'keyword.function.go'] + + it 'tokenizes func names in their declarations', -> + tests = [ + { + 'line': 'func f()' + 'tokenPos': 2 + } + { + 'line': 'func (T) f()' + 'tokenPos': 6 + } + { + 'line': 'func (t T) f()' + 'tokenPos': 6 + } + { + 'line': 'func (t *T) f()' + 'tokenPos': 8 + } + ] + + for t in tests + {tokens} = grammar.tokenizeLine t.line + expect(tokens[0].value).toEqual 'func' + expect(tokens[0].scopes).toEqual ['source.go', 'keyword.function.go'] + + relevantToken = tokens[t.tokenPos] + expect(relevantToken).toBeDefined() + expect(relevantToken.value).toEqual 'f' + expect(relevantToken.scopes).toEqual ['source.go', 'entity.name.function.go'] + + next = tokens[t.tokenPos + 1] + expect(next.value).toEqual '(' + expect(next.scopes).toEqual ['source.go', 
'punctuation.definition.begin.bracket.round.go'] + + it 'tokenizes operators method declarations', -> + tests = [ + { + 'line': 'func (t *T) f()' + 'tokenPos': 4 + } + ] + + for t in tests + {tokens} = grammar.tokenizeLine t.line + expect(tokens[0].value).toEqual 'func' + expect(tokens[0].scopes).toEqual ['source.go', 'keyword.function.go'] + + relevantToken = tokens[t.tokenPos] + expect(relevantToken.value).toEqual '*' + expect(relevantToken.scopes).toEqual ['source.go', 'keyword.operator.address.go'] + + it 'tokenizes numerics', -> + numbers = + 'constant.numeric.integer.go': ['42', '0600', '0xBadFace', '170141183460469231731687303715884105727', '1E6', '0i', '011i', '1E6i'] + 'constant.numeric.floating-point.go': [ + '0.', '72.40', '072.40', '2.71828', '1.e+0', '6.67428e-11', '.25', '.12345E+5', + '0.i', '2.71828i', '1.e+0i', '6.67428e-11i', '.25i', '.12345E+5i' + ] + + for scope, nums of numbers + for num in nums + {tokens} = grammar.tokenizeLine num + expect(tokens[0].value).toEqual num + expect(tokens[0].scopes).toEqual ['source.go', scope] + + invalidOctals = ['08', '039', '0995'] + for num in invalidOctals + {tokens} = grammar.tokenizeLine num + expect(tokens[0].value).toEqual num + expect(tokens[0].scopes).toEqual ['source.go', 'invalid.illegal.numeric.go'] + + it 'tokenizes language constants', -> + constants = ['true', 'false', 'nil', 'iota'] + for constant in constants + {tokens} = grammar.tokenizeLine constant + expect(tokens[0].value).toEqual constant + expect(tokens[0].scopes).toEqual ['source.go', 'constant.language.go'] + + it 'tokenizes built-in functions', -> + funcs = [ + 'append(x)', 'cap(x)', 'close(x)', 'complex(x)', 'copy(x)', 'delete(x)', 'imag(x)', 'len(x)', 'make(x)', 'new(x)', + 'panic(x)', 'print(x)', 'println(x)', 'real(x)', 'recover(x)' + ] + funcVals = ['append', 'cap', 'close', 'complex', 'copy', 'delete', 'imag', 'len', 'make', 'new', 'panic', 'print', 'println', 'real', 'recover'] + + for func in funcs + funcVal = funcVals[funcs.indexOf(func)] + {tokens} = grammar.tokenizeLine func + expect(tokens[0].value).toEqual funcVal + expect(tokens[0].scopes).toEqual ['source.go', 'support.function.builtin.go'] + + it 'tokenizes operators', -> + binaryOpers = + 'keyword.operator.arithmetic.go': ['+', '-', '*', '/', '%'] + 'keyword.operator.arithmetic.bitwise.go': ['&', '|', '^', '&^', '<<', '>>'] + 'keyword.operator.assignment.go': ['=', '+=', '-=', '|=', '^=', '*=', '/=', ':=', '%=', '<<=', '>>=', '&=', '&^='] + 'keyword.operator.channel.go': ['<-'] + 'keyword.operator.comparison.go': ['==', '!=', '<', '<=', '>', '>='] + 'keyword.operator.decrement.go': ['--'] + 'keyword.operator.ellipsis.go': ['...'] + 'keyword.operator.increment.go': ['++'] + 'keyword.operator.logical.go': ['&&', '||'] + + unaryOpers = + 'keyword.operator.address.go': ['*var', '&var'] + 'keyword.operator.arithmetic.go': ['+var', '-var'] + 'keyword.operator.arithmetic.bitwise.go': ['^var'] + 'keyword.operator.logical.go': ['!var'] + + for scope, ops of binaryOpers + for op in ops + {tokens} = grammar.tokenizeLine op + expect(tokens[0].value).toEqual op + expect(tokens[0].scopes).toEqual ['source.go', scope] + + for scope, ops of unaryOpers + for op in ops + {tokens} = grammar.tokenizeLine op + expect(tokens[0].value).toEqual op[0] + expect(tokens[0].scopes).toEqual ['source.go', scope] + + it 'does not treat values/variables attached to comparion operators as extensions of the operator', -> + {tokens} = grammar.tokenizeLine '2<3.0 && 12>bar' + expect(tokens[0]).toEqual value: '2', scopes: 
['source.go', 'constant.numeric.integer.go'] + expect(tokens[1]).toEqual value: '<', scopes: ['source.go', 'keyword.operator.comparison.go'] + expect(tokens[2]).toEqual value: '3.0', scopes: ['source.go', 'constant.numeric.floating-point.go'] + expect(tokens[6]).toEqual value: '12', scopes: ['source.go', 'constant.numeric.integer.go'] + expect(tokens[7]).toEqual value: '>', scopes: ['source.go', 'keyword.operator.comparison.go'] + expect(tokens[8]).toEqual value: 'bar', scopes: ['source.go'] + + it 'tokenizes punctuation brackets', -> + {tokens} = grammar.tokenizeLine '{([])}' + expect(tokens[0]).toEqual value: '{', scopes: ['source.go', 'punctuation.definition.begin.bracket.curly.go'] + expect(tokens[1]).toEqual value: '(', scopes: ['source.go', 'punctuation.definition.begin.bracket.round.go'] + expect(tokens[2]).toEqual value: '[', scopes: ['source.go', 'punctuation.definition.bracket.square.go'] + expect(tokens[3]).toEqual value: ']', scopes: ['source.go', 'punctuation.definition.bracket.square.go'] + expect(tokens[4]).toEqual value: ')', scopes: ['source.go', 'punctuation.definition.end.bracket.round.go'] + expect(tokens[5]).toEqual value: '}', scopes: ['source.go', 'punctuation.definition.end.bracket.curly.go'] + + it 'tokenizes punctuation delimiters', -> + delims = + 'punctuation.other.comma.go': ',' + 'punctuation.other.period.go': '.' + 'punctuation.other.colon.go': ':' + + for scope, delim of delims + {tokens} = grammar.tokenizeLine delim + expect(tokens[0].value).toEqual delim + expect(tokens[0].scopes).toEqual ['source.go', scope] + + it 'tokenizes func names in calls to them', -> + tests = [ + { + 'line': 'a.b()' + 'name': 'b' + 'tokenPos': 2 + 'isFunc': true + } + { + 'line': 'pkg.Func1(' + 'name': 'Func1' + 'tokenPos': 2 + 'isFunc': true + } + { + 'line': 'pkg.Func1().Func2(' + 'name': 'Func2' + 'tokenPos': 6 + 'isFunc': true + } + { + 'line': 'pkg.var' + 'name': 'var' + 'tokenPos': 2 + 'isFunc': false + } + { + 'line': 'doWork(ch)' + 'name': 'doWork' + 'tokenPos': 0 + 'isFunc': true + } + { + 'line': 'f1()' + 'name': 'f1' + 'tokenPos': 0 + 'isFunc': true + } + ] + + want = ['source.go', 'support.function.go'] + + for t in tests + {tokens} = grammar.tokenizeLine t.line + + relevantToken = tokens[t.tokenPos] + if t.isFunc + expect(relevantToken).not.toBeNull() + expect(relevantToken.value).toEqual t.name + expect(relevantToken.scopes).toEqual want + + next = tokens[t.tokenPos + 1] + expect(next.value).toEqual '(' + expect(next.scopes).toEqual ['source.go', 'punctuation.definition.begin.bracket.round.go'] + else + expect(relevantToken.scopes).not.toEqual want + + it 'tokenizes package names', -> + tests = ['package main', 'package mypackage'] + + for test in tests + {tokens} = grammar.tokenizeLine test + expect(tokens[0].scopes).toEqual ['source.go', 'keyword.package.go'] + expect(tokens[2].scopes).toEqual ['source.go', 'entity.name.package.go'] + + it 'tokenizes invalid package names as such', -> + {tokens} = grammar.tokenizeLine 'package 0mypackage' + expect(tokens[0]).toEqual value: 'package', scopes: ['source.go', 'keyword.package.go'] + expect(tokens[2]).toEqual value: '0mypackage', scopes: ['source.go', 'invalid.illegal.identifier.go'] + + it 'does not treat words that have a trailing package as a package name', -> + {tokens} = grammar.tokenizeLine 'func myFunc(Varpackage string)' + expect(tokens[4]).toEqual value: 'Varpackage ', scopes: ['source.go'] + expect(tokens[5]).toEqual value: 'string', scopes: ['source.go', 'storage.type.string.go'] + + it 'tokenizes type 
names', -> + tests = ['type mystring string', 'type mytype interface{'] + + for test in tests + {tokens} = grammar.tokenizeLine test + expect(tokens[0].scopes).toEqual ['source.go', 'keyword.type.go'] + expect(tokens[2].scopes).toEqual ['source.go', 'entity.name.type.go'] + + it 'tokenizes invalid type names as such', -> + {tokens} = grammar.tokenizeLine 'type 0mystring string' + expect(tokens[0]).toEqual value: 'type', scopes: ['source.go', 'keyword.type.go'] + expect(tokens[2]).toEqual value: '0mystring', scopes: ['source.go', 'invalid.illegal.identifier.go'] + + it 'does not treat words that have a trailing type as a type name', -> + {tokens} = grammar.tokenizeLine 'func myFunc(Vartype string)' + expect(tokens[4]).toEqual value: 'Vartype ', scopes: ['source.go'] + expect(tokens[5]).toEqual value: 'string', scopes: ['source.go', 'storage.type.string.go'] + + describe 'in variable declarations', -> + testVar = (token) -> + expect(token.value).toBe 'var' + expect(token.scopes).toEqual ['source.go', 'keyword.var.go'] + + testVarAssignment = (token, name) -> + expect(token.value).toBe name + expect(token.scopes).toEqual ['source.go', 'variable.other.assignment.go'] + + testVarDeclaration = (token, name) -> + expect(token.value).toBe name + expect(token.scopes).toEqual ['source.go', 'variable.other.declaration.go'] + + testOp = (token, op) -> + expect(token.value).toBe op + expect(token.scopes).toEqual ['source.go', 'keyword.operator.go'] + + testOpAddress = (token, op) -> + expect(token.value).toBe op + expect(token.scopes).toEqual ['source.go', 'keyword.operator.address.go'] + + testOpAssignment = (token, op) -> + expect(token.value).toBe op + expect(token.scopes).toEqual ['source.go', 'keyword.operator.assignment.go'] + + testOpBracket = (token, op, type) -> + expect(token.value).toBe op + expect(token.scopes).toEqual ['source.go', "punctuation.definition.variables.#{type}.bracket.round.go"] + + testOpPunctuation = (token, op) -> + expect(token.value).toBe op + expect(token.scopes).toEqual ['source.go', 'punctuation.other.comma.go'] + + testOpTermination = (token, op) -> + expect(token.value).toBe op + expect(token.scopes).toEqual ['source.go', 'punctuation.terminator.go'] + + testNumType = (token, name) -> + expect(token.value).toBe name + expect(token.scopes).toEqual ['source.go', 'storage.type.numeric.go'] + + testStringType = (token, name) -> + expect(token.value).toBe name + expect(token.scopes).toEqual ['source.go', 'storage.type.string.go'] + + testNum = (token, value) -> + expect(token.value).toBe value + expect(token.scopes).toEqual ['source.go', 'constant.numeric.integer.go'] + + testString = (token, value) -> + expect(token.value).toBe value + expect(token.scopes).toEqual ['source.go', 'string.quoted.double.go'] + + describe 'in var statements', -> + it 'tokenizes a single variable assignment', -> + {tokens} = grammar.tokenizeLine 'i = 7' + testVarAssignment tokens[0], 'i' + testOpAssignment tokens[2], '=' + testNum tokens[4], '7' + + it 'tokenizes a single qualified variable assignment', -> + {tokens} = grammar.tokenizeLine 'a.b.cde = 7' + expect(tokens[0]).toEqual value: 'a', scopes: ['source.go', 'variable.other.assignment.go'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.go', 'variable.other.assignment.go', 'punctuation.other.period.go'] + expect(tokens[2]).toEqual value: 'b', scopes: ['source.go', 'variable.other.assignment.go'] + expect(tokens[3]).toEqual value: '.', scopes: ['source.go', 'variable.other.assignment.go', 'punctuation.other.period.go'] + 
expect(tokens[4]).toEqual value: 'cde', scopes: ['source.go', 'variable.other.assignment.go'] + testOpAssignment tokens[6], '=' + testNum tokens[8], '7' + + it 'tokenizes multiple variable assignments', -> + {tokens} = grammar.tokenizeLine 'i, j = 7, 8' + testVarAssignment tokens[0], 'i' + testOpPunctuation tokens[1], ',' + testVarAssignment tokens[3], 'j' + testOpAssignment tokens[5], '=' + testNum tokens[7], '7' + testNum tokens[10], '8' + + it 'tokenizes multiple qualified variable assignment', -> + {tokens} = grammar.tokenizeLine 'a.b, c.d.efg = 7, 8' + expect(tokens[0]).toEqual value: 'a', scopes: ['source.go', 'variable.other.assignment.go'] + expect(tokens[1]).toEqual value: '.', scopes: ['source.go', 'variable.other.assignment.go', 'punctuation.other.period.go'] + expect(tokens[2]).toEqual value: 'b', scopes: ['source.go', 'variable.other.assignment.go'] + testOpPunctuation tokens[3], ',' + expect(tokens[5]).toEqual value: 'c', scopes: ['source.go', 'variable.other.assignment.go'] + expect(tokens[6]).toEqual value: '.', scopes: ['source.go', 'variable.other.assignment.go', 'punctuation.other.period.go'] + expect(tokens[7]).toEqual value: 'd', scopes: ['source.go', 'variable.other.assignment.go'] + expect(tokens[8]).toEqual value: '.', scopes: ['source.go', 'variable.other.assignment.go', 'punctuation.other.period.go'] + expect(tokens[9]).toEqual value: 'efg', scopes: ['source.go', 'variable.other.assignment.go'] + testOpAssignment tokens[11], '=' + testNum tokens[13], '7' + testNum tokens[16], '8' + + it 'tokenizes a single name and a type', -> + {tokens} = grammar.tokenizeLine 'var i int' + testVar tokens[0] + testVarDeclaration tokens[2], 'i' + testNumType tokens[4], 'int' + + it 'tokenizes a name and a qualified type', -> + {tokens} = grammar.tokenizeLine 'var a b.c' + testVar tokens[0] + expect(tokens[2]).toEqual value: 'a', scopes: ['source.go', 'variable.other.declaration.go'] + expect(tokens[3]).toEqual value: ' b', scopes: ['source.go'] + expect(tokens[4]).toEqual value: '.', scopes: ['source.go', 'punctuation.other.period.go'] + expect(tokens[5]).toEqual value: 'c', scopes: ['source.go'] + + it 'tokenizes a single name and an array type', -> + {tokens} = grammar.tokenizeLine 'var s []string' + testVar tokens[0] + testVarDeclaration tokens[2], 's' + testStringType tokens[6], 'string' + + it 'tokenizes a single name and an array type with predetermined length', -> + {tokens} = grammar.tokenizeLine 'var s [4]string' + testVar tokens[0] + testVarDeclaration tokens[2], 's' + expect(tokens[4]).toEqual value: '[', scopes: ['source.go', 'punctuation.definition.bracket.square.go'] + expect(tokens[5]).toEqual value: '4', scopes: ['source.go', 'constant.numeric.integer.go'] + expect(tokens[6]).toEqual value: ']', scopes: ['source.go', 'punctuation.definition.bracket.square.go'] + testStringType tokens[7], 'string' + + it 'tokenizes a single name and an array type with variadic length', -> + {tokens} = grammar.tokenizeLine 'var s [...]string' + testVar tokens[0] + testVarDeclaration tokens[2], 's' + expect(tokens[4]).toEqual value: '[', scopes: ['source.go', 'punctuation.definition.bracket.square.go'] + expect(tokens[5]).toEqual value: '...', scopes: ['source.go', 'keyword.operator.ellipsis.go'] + expect(tokens[6]).toEqual value: ']', scopes: ['source.go', 'punctuation.definition.bracket.square.go'] + testStringType tokens[7], 'string' + + it 'tokenizes a single name and multi-dimensional types with an address', -> + {tokens} = grammar.tokenizeLine 'var e [][]*string' + testVar 
tokens[0] + testVarDeclaration tokens[2], 'e' + expect(tokens[4]).toEqual value: '[', scopes: ['source.go', 'punctuation.definition.bracket.square.go'] + expect(tokens[5]).toEqual value: ']', scopes: ['source.go', 'punctuation.definition.bracket.square.go'] + expect(tokens[6]).toEqual value: '[', scopes: ['source.go', 'punctuation.definition.bracket.square.go'] + expect(tokens[7]).toEqual value: ']', scopes: ['source.go', 'punctuation.definition.bracket.square.go'] + testOpAddress tokens[8], '*' + testStringType tokens[9], 'string' + + it 'tokenizes a single name and a channel', -> + {tokens} = grammar.tokenizeLine 'var x <-chan bool' + testVar tokens[0] + testVarDeclaration tokens[2], 'x' + expect(tokens[4]).toEqual value: '<-', scopes: ['source.go', 'keyword.operator.channel.go'] + expect(tokens[5]).toEqual value: 'chan', scopes: ['source.go', 'keyword.channel.go'] + expect(tokens[7]).toEqual value: 'bool', scopes: ['source.go', 'storage.type.boolean.go'] + + it 'tokenizes a single name and its initialization', -> + {tokens} = grammar.tokenizeLine ' var k = 0' + testVar tokens[1] + testVarAssignment tokens[3], 'k' + testOpAssignment tokens[5], '=' + testNum tokens[7], '0' + + it 'tokenizes a single name, a type, and an initialization', -> + {tokens} = grammar.tokenizeLine 'var z blub = 7' + testVar tokens[0] + testVarAssignment tokens[2], 'z' + expect(tokens[3]).toEqual value: ' blub ', scopes: ['source.go'] + testOpAssignment tokens[4], '=' + testNum tokens[6], '7' + + it 'tokenizes a single name, a qualified type, and an initialization', -> + {tokens} = grammar.tokenizeLine 'var a b.c = 5' + testVar tokens[0] + expect(tokens[2]).toEqual value: 'a', scopes: ['source.go', 'variable.other.assignment.go'] + expect(tokens[3]).toEqual value: ' b', scopes: ['source.go'] + expect(tokens[4]).toEqual value: '.', scopes: ['source.go', 'punctuation.other.period.go'] + expect(tokens[5]).toEqual value: 'c ', scopes: ['source.go'] + testOpAssignment tokens[6], '=' + testNum tokens[8], '5' + + it 'does not tokenize more than necessary', -> + # This test is worded vaguely because it's hard to describe. + # Basically, make sure that the variable match isn't tokenizing the entire line + # in a (=.+) style match. This prevents multiline stuff after the assignment + # from working correctly, because match can only tokenize single lines. + lines = grammar.tokenizeLines ''' + var multiline string = `wow! 
+ this should work!` + ''' + testVar lines[0][0] + testVarAssignment lines[0][2], 'multiline' + testStringType lines[0][4], 'string' + testOpAssignment lines[0][6], '=' + expect(lines[0][8]).toEqual value: '`', scopes: ['source.go', 'string.quoted.raw.go', 'punctuation.definition.string.begin.go'] + expect(lines[1][1]).toEqual value: '`', scopes: ['source.go', 'string.quoted.raw.go', 'punctuation.definition.string.end.go'] + + it 'tokenizes multiple names and a type', -> + {tokens} = grammar.tokenizeLine 'var U, V, W float64' + testVar tokens[0] + testVarDeclaration tokens[2], 'U' + testOpPunctuation tokens[3], ',' + testVarDeclaration tokens[5], 'V' + testOpPunctuation tokens[6], ',' + testVarDeclaration tokens[8], 'W' + + it 'tokenizes multiple names and a qualified type', -> + {tokens} = grammar.tokenizeLine 'var a, b c.d' + testVar tokens[0] + expect(tokens[2]).toEqual value: 'a', scopes: ['source.go', 'variable.other.declaration.go'] + testOpPunctuation tokens[3], ',' + expect(tokens[5]).toEqual value: 'b', scopes: ['source.go', 'variable.other.declaration.go'] + expect(tokens[6]).toEqual value: ' c', scopes: ['source.go'] + expect(tokens[7]).toEqual value: '.', scopes: ['source.go', 'punctuation.other.period.go'] + expect(tokens[8]).toEqual value: 'd', scopes: ['source.go'] + + it 'tokenizes multiple names and initialization expressions', -> + {tokens} = grammar.tokenizeLine 'var x, y, z = 1, 2, 3' + testVar tokens[0] + testVarAssignment tokens[2], 'x' + testOpPunctuation tokens[3], ',' + testVarAssignment tokens[5], 'y' + testOpPunctuation tokens[6], ',' + testVarAssignment tokens[8], 'z' + testOpAssignment tokens[10], '=' + testNum tokens[12], '1' + testOpPunctuation tokens[13], ',' + testNum tokens[15], '2' + testOpPunctuation tokens[16], ',' + testNum tokens[18], '3' + + it 'tokenizes multiple names, a type, and initialization expressions', -> + {tokens} = grammar.tokenizeLine 'var x, y float32 = float, thirtytwo' + testVar tokens[0] + testVarAssignment tokens[2], 'x' + testOpPunctuation tokens[3], ',' + testVarAssignment tokens[5], 'y' + testNumType tokens[7], 'float32' + testOpAssignment tokens[9], '=' + testOpPunctuation tokens[11], ',' + + it 'tokenizes multiple names, a qualified type, and initialization expression', -> + {tokens} = grammar.tokenizeLine 'var a, b c.d = 1, 2' + testVar tokens[0] + expect(tokens[2]).toEqual value: 'a', scopes: ['source.go', 'variable.other.assignment.go'] + testOpPunctuation tokens[3], ',' + expect(tokens[5]).toEqual value: 'b', scopes: ['source.go', 'variable.other.assignment.go'] + expect(tokens[6]).toEqual value: ' c', scopes: ['source.go'] + expect(tokens[7]).toEqual value: '.', scopes: ['source.go', 'punctuation.other.period.go'] + expect(tokens[8]).toEqual value: 'd ', scopes: ['source.go'] + testOpAssignment tokens[9], '=' + testNum tokens[11], '1' + testOpPunctuation tokens[12], ',' + testNum tokens[14], '2' + + it 'tokenizes multiple names and a function call', -> + {tokens} = grammar.tokenizeLine 'var re, im = complexSqrt(-1)' + testVar tokens[0] + testVarAssignment tokens[2], 're' + testVarAssignment tokens[5], 'im' + testOpAssignment tokens[7], '=' + + it 'tokenizes with a placeholder', -> + {tokens} = grammar.tokenizeLine 'var _, found = entries[name]' + testVar tokens[0] + testVarAssignment tokens[2], '_' + testVarAssignment tokens[5], 'found' + testOpAssignment tokens[7], '=' + + it 'does not treat words that have a trailing var as a variable declaration', -> + {tokens} = grammar.tokenizeLine 'func test(envvar string)' + 
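+        # Descriptive note (editorial): 'envvar' merely ends in the substring 'var';
+        # it should stay plain source.go text and not pick up the keyword.var.go scope.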
expect(tokens[4]).toEqual value: 'envvar ', scopes: ['source.go'] + expect(tokens[5]).toEqual value: 'string', scopes: ['source.go', 'storage.type.string.go'] + + describe 'in var statement blocks', -> + it 'tokenizes single names with a type', -> + lines = grammar.tokenizeLines ''' + var ( + foo *bar + ) + ''' + testVar lines[0][0] + testOpBracket lines[0][2], '(', 'begin' + testVarDeclaration lines[1][1], 'foo' + testOpAddress lines[1][3], '*' + testOpBracket lines[2][0], ')', 'end' + + it 'tokenizes single names with an initializer', -> + lines = grammar.tokenizeLines ''' + var ( + foo = 42 + ) + ''' + testVar lines[0][0], 'var' + testOpBracket lines[0][2], '(', 'begin' + testVarAssignment lines[1][1], 'foo' + testOpAssignment lines[1][3], '=' + testNum lines[1][5], '42' + testOpBracket lines[2][0], ')', 'end' + + it 'tokenizes multiple names', -> + lines = grammar.tokenizeLines ''' + var ( + foo, bar = baz, quux + ) + ''' + testVar lines[0][0] + testOpBracket lines[0][2], '(', 'begin' + testVarAssignment lines[1][1], 'foo' + testOpPunctuation lines[1][2], ',' + testVarAssignment lines[1][4], 'bar' + testOpAssignment lines[1][6], '=' + testOpPunctuation lines[1][8], ',' + testOpBracket lines[2][0], ')', 'end' + + it 'tokenizes non variable declarations', -> + lines = grammar.tokenizeLines ''' + var ( + // I am a comment + foo *bar + userRegister = &routers.Handler{ + Handler: func(c echo.Context) error { + if err := userService.Register(&user); err != nil { + return err + } + return nil + }, + } + ) + ''' + testVar lines[0][0] + testOpBracket lines[0][2], '(', 'begin' + expect(lines[1][1]).toEqual value: '//', scopes: ['source.go', 'comment.line.double-slash.go', 'punctuation.definition.comment.go'] + expect(lines[1][2]).toEqual value: ' I am a comment', scopes: ['source.go', 'comment.line.double-slash.go'] + testVarDeclaration lines[2][1], 'foo' + testOpAddress lines[2][3], '*' + testVarAssignment lines[3][1], 'userRegister' + expect(lines[4][3]).toEqual value: 'func', scopes: ['source.go', 'keyword.function.go'] + expect(lines[5][1]).toEqual value: 'if', scopes: ['source.go', 'keyword.control.go'] + expect(lines[8][3]).toEqual value: 'nil', scopes: ['source.go', 'constant.language.go'] + testOpBracket lines[11][0], ')', 'end' + + it 'tokenizes all parts of variable initializations correctly', -> + lines = grammar.tokenizeLines ''' + var ( + m = map[string]int{ + "key": 10, + } + ) + ''' + testVar lines[0][0] + testOpBracket lines[0][2], '(', 'begin' + testVarAssignment lines[1][1], 'm' + testOpAssignment lines[1][3], '=' + testString lines[2][2], 'key' + testNum lines[2][6], '10' + testOpBracket lines[4][0], ')', 'end' + + it 'tokenizes non-ASCII variable names', -> + {tokens} = grammar.tokenizeLine 'über = test' + testVarAssignment tokens[0], 'über' + testOpAssignment tokens[2], '=' + + it 'tokenizes invalid variable names as such', -> + {tokens} = grammar.tokenizeLine 'var 0test = 0' + testVar tokens[0] + expect(tokens[2]).toEqual value: '0test', scopes: ['source.go', 'invalid.illegal.identifier.go'] + + describe 'in shorthand variable declarations', -> + it 'tokenizes single names', -> + {tokens} = grammar.tokenizeLine 'f := func() int { return 7 }' + testVarAssignment tokens[0], 'f' + testOpAssignment tokens[2], ':=' + + {tokens} = grammar.tokenizeLine 'ch := make(chan int)' + testVarAssignment tokens[0], 'ch' + testOpAssignment tokens[2], ':=' + + it 'tokenizes multiple names', -> + {tokens} = grammar.tokenizeLine 'i, j := 0, 10' + testVarAssignment tokens[0], 'i' + 
testOpPunctuation tokens[1], ',' + testVarAssignment tokens[3], 'j' + + {tokens} = grammar.tokenizeLine 'if _, y, z := coord(p); z > 0' + testVarAssignment tokens[2], '_' + testVarAssignment tokens[5], 'y' + testVarAssignment tokens[8], 'z' + testOpAssignment tokens[10], ':=' + testOpTermination tokens[16], ';' + + describe 'in imports declarations', -> + testImport = (token) -> + expect(token.value).toBe 'import' + expect(token.scopes).toEqual ['source.go', 'keyword.import.go'] + + testImportAlias = (token, name) -> + expect(token.value).toBe name + expect(token.scopes).toEqual ['source.go', 'entity.alias.import.go'] + + testImportPackage = (token, name) -> + expect(token.value).toBe name + expect(token.scopes).toEqual ['source.go', 'string.quoted.double.go', 'entity.name.import.go'] + + testOpBracket = (token, op, type) -> + expect(token.value).toBe op + expect(token.scopes).toEqual ['source.go', "punctuation.definition.imports.#{type}.bracket.round.go"] + + testBeginQuoted = (token) -> + expect(token.value).toBe '"' + expect(token.scopes).toEqual ['source.go', 'string.quoted.double.go', 'punctuation.definition.string.begin.go'] + + testEndQuoted = (token) -> + expect(token.value).toBe '"' + expect(token.scopes).toEqual ['source.go', 'string.quoted.double.go', 'punctuation.definition.string.end.go'] + + describe 'when it is a single line declaration', -> + it 'tokenizes declarations with a package name', -> + {tokens} = grammar.tokenizeLine 'import "fmt"' + testImport tokens[0] + testBeginQuoted tokens[2] + testImportPackage tokens[3], 'fmt' + testEndQuoted tokens[4] + + it 'tokenizes declarations with a package name and an alias', -> + {tokens} = grammar.tokenizeLine 'import . "fmt"' + testImport tokens[0] + testImportAlias tokens[2], '.' + testBeginQuoted tokens[4] + testImportPackage tokens[5], 'fmt' + testEndQuoted tokens[6] + {tokens} = grammar.tokenizeLine 'import otherpackage "github.com/test/package"' + testImport tokens[0] + testImportAlias tokens[2], 'otherpackage' + testBeginQuoted tokens[4] + testImportPackage tokens[5], 'github.com/test/package' + testEndQuoted tokens[6] + + it 'does not treat words that have a trailing import as a import declaration', -> + {tokens} = grammar.tokenizeLine 'func myFunc(Varimport string)' + expect(tokens[4]).toEqual value: 'Varimport ', scopes: ['source.go'] + expect(tokens[5]).toEqual value: 'string', scopes: ['source.go', 'storage.type.string.go'] + + describe 'when it is a multi line declaration', -> + it 'tokenizes single declarations with a package name', -> + [kwd, decl, closing] = grammar.tokenizeLines ''' + import ( + "github.com/test/package" + ) + ''' + testImport kwd[0] + testOpBracket kwd[2], '(', 'begin' + testBeginQuoted decl[1] + testImportPackage decl[2], 'github.com/test/package' + testEndQuoted decl[3] + testOpBracket closing[0], ')', 'end' + + it 'tokenizes multiple declarations with a package name', -> + [kwd, decl, decl2, closing] = grammar.tokenizeLines ''' + import ( + "github.com/test/package" + "fmt" + ) + ''' + testImport kwd[0] + testOpBracket kwd[2], '(', 'begin' + testBeginQuoted decl[1] + testImportPackage decl[2], 'github.com/test/package' + testEndQuoted decl[3] + testBeginQuoted decl2[1] + testImportPackage decl2[2], 'fmt' + testEndQuoted decl2[3] + testOpBracket closing[0], ')', 'end' + + it 'tokenizes single imports with an alias for a multi-line declaration', -> + [kwd, decl, closing] = grammar.tokenizeLines ''' + import ( + . 
"github.com/test/package" + ) + ''' + testImport kwd[0] + testOpBracket kwd[2], '(', 'begin' + testImportAlias decl[1], '.' + testBeginQuoted decl[3] + testImportPackage decl[4], 'github.com/test/package' + testEndQuoted decl[5] + testOpBracket closing[0], ')', 'end' + + it 'tokenizes multiple imports with an alias for a multi-line declaration', -> + [kwd, decl, decl2, closing] = grammar.tokenizeLines ''' + import ( + . "github.com/test/package" + "fmt" + ) + ''' + testImport kwd[0] + testOpBracket kwd[2], '(', 'begin' + testImportAlias decl[1], '.' + testBeginQuoted decl[3] + testImportPackage decl[4], 'github.com/test/package' + testEndQuoted decl[5] + testBeginQuoted decl2[1] + testImportPackage decl2[2], 'fmt' + testEndQuoted decl2[3] + testOpBracket closing[0], ')', 'end' diff --git a/packages/language-go/spec/language-go-spec.coffee b/packages/language-go/spec/language-go-spec.coffee new file mode 100644 index 000000000..e2e6255e8 --- /dev/null +++ b/packages/language-go/spec/language-go-spec.coffee @@ -0,0 +1,62 @@ +describe 'Go settings', -> + [editor, languageMode] = [] + + afterEach -> + editor.destroy() + + beforeEach -> + atom.config.set('core.useTreeSitterParsers', false) + + waitsForPromise -> + atom.workspace.open().then (o) -> + editor = o + languageMode = editor.languageMode + + waitsForPromise -> + atom.packages.activatePackage('language-go') + + it 'matches lines correctly using the increaseIndentPattern', -> + increaseIndentRegex = languageMode.increaseIndentRegexForScopeDescriptor(['source.go']) + + expect(increaseIndentRegex.testSync(' case true:')).toBeTruthy() + expect(increaseIndentRegex.testSync(' default:')).toBeTruthy() + expect(increaseIndentRegex.testSync('func something() {')).toBeTruthy() + expect(increaseIndentRegex.testSync(' if true {')).toBeTruthy() + expect(increaseIndentRegex.testSync(' else {')).toBeTruthy() + expect(increaseIndentRegex.testSync(' switch {')).toBeTruthy() + expect(increaseIndentRegex.testSync(' switch true {')).toBeTruthy() + expect(increaseIndentRegex.testSync(' select {')).toBeTruthy() + expect(increaseIndentRegex.testSync(' select true {')).toBeTruthy() + expect(increaseIndentRegex.testSync(' for v := range val {')).toBeTruthy() + expect(increaseIndentRegex.testSync(' for i := 0; i < 10; i++ {')).toBeTruthy() + expect(increaseIndentRegex.testSync(' for i := 0; i < 10; i++ {')).toBeTruthy() + expect(increaseIndentRegex.testSync(' type something struct {')).toBeTruthy() + expect(increaseIndentRegex.testSync(' fmt.Printf("some%s",')).toBeTruthy() + expect(increaseIndentRegex.testSync(' aSlice := []string{}{')).toBeTruthy() + + it 'matches lines correctly using the decreaseIndentPattern', -> + decreaseIndentRegex = languageMode.decreaseIndentRegexForScopeDescriptor(['source.go']) + + expect(decreaseIndentRegex.testSync(' case true:')).toBeTruthy() + expect(decreaseIndentRegex.testSync(' default:')).toBeTruthy() + expect(decreaseIndentRegex.testSync(' }')).toBeTruthy() + expect(decreaseIndentRegex.testSync(' },')).toBeTruthy() + expect(decreaseIndentRegex.testSync(' )')).toBeTruthy() + expect(decreaseIndentRegex.testSync(' ),')).toBeTruthy() + + it 'matches lines correctly using the decreaseNextIndentPattern', -> + decreaseNextIndentRegex = languageMode.decreaseNextIndentRegexForScopeDescriptor(['source.go']) + + expect(decreaseNextIndentRegex.testSync(' fmt.Println("something"))')).toBeTruthy() + expect(decreaseNextIndentRegex.testSync(' fmt.Println("something")),')).toBeTruthy() + expect(decreaseNextIndentRegex.testSync(' 
fmt.Println("something"), "x"),')).toBeTruthy() + expect(decreaseNextIndentRegex.testSync(' fmt.Println(fmt.Sprint("something"))),')).toBeTruthy() + expect(decreaseNextIndentRegex.testSync(' fmt.Println(fmt.Sprint("something"), "x")),')).toBeTruthy() + + expect(decreaseNextIndentRegex.testSync(' fmt.Println("something")')).toBeFalsy() + expect(decreaseNextIndentRegex.testSync(' fmt.Println("something"),')).toBeFalsy() + + # a line with many (), testing for catastrophic backtracking. + # see https://github.com/atom/language-go/issues/78 + longLine = 'first.second().third().fourth().fifth().sixth().seventh().eighth().ninth().tenth()' + expect(decreaseNextIndentRegex.testSync(longLine)).toBeFalsy() diff --git a/packages/language-html/.coffeelintignore b/packages/language-html/.coffeelintignore new file mode 100644 index 000000000..1db51fed7 --- /dev/null +++ b/packages/language-html/.coffeelintignore @@ -0,0 +1 @@ +spec/fixtures diff --git a/packages/language-html/.github/no-response.yml b/packages/language-html/.github/no-response.yml new file mode 100644 index 000000000..1c8799d13 --- /dev/null +++ b/packages/language-html/.github/no-response.yml @@ -0,0 +1,15 @@ +# Configuration for probot-no-response - https://github.com/probot/no-response + +# Number of days of inactivity before an issue is closed for lack of response +daysUntilClose: 28 + +# Label requiring a response +responseRequiredLabel: more-information-needed + +# Comment to post when closing an issue for lack of response. Set to `false` to disable. +closeComment: > + This issue has been automatically closed because there has been no response + to our request for more information from the original author. With only the + information that is currently in the issue, we don't have enough information + to take action. Please reach out if you have or find the answers we need so + that we can investigate further. diff --git a/packages/language-html/.github/workflows/main.yml b/packages/language-html/.github/workflows/main.yml new file mode 100644 index 000000000..80b99d1b6 --- /dev/null +++ b/packages/language-html/.github/workflows/main.yml @@ -0,0 +1,27 @@ +name: CI + +on: [push] + +env: + CI: true + +jobs: + Test: + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + channel: [stable, beta] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v1 + - uses: UziTech/action-setup-atom@v2 + with: + version: ${{ matrix.channel }} + - name: Install windows-build-tools + if: ${{ matrix.os == 'windows-latest' }} + run: | + npm i windows-build-tools@4.0.0 + - name: Install dependencies + run: apm install + - name: Run tests + run: atom --test spec diff --git a/packages/language-html/.gitignore b/packages/language-html/.gitignore new file mode 100644 index 000000000..3c3629e64 --- /dev/null +++ b/packages/language-html/.gitignore @@ -0,0 +1 @@ +node_modules diff --git a/packages/language-html/CONTRIBUTING.md b/packages/language-html/CONTRIBUTING.md new file mode 100644 index 000000000..0fd0ad696 --- /dev/null +++ b/packages/language-html/CONTRIBUTING.md @@ -0,0 +1 @@ +See the [Atom contributing guide](https://github.com/atom/atom/blob/master/CONTRIBUTING.md) diff --git a/packages/language-html/ISSUE_TEMPLATE.md b/packages/language-html/ISSUE_TEMPLATE.md new file mode 100644 index 000000000..b60bb86c9 --- /dev/null +++ b/packages/language-html/ISSUE_TEMPLATE.md @@ -0,0 +1,40 @@ +<!-- + +Have you read Atom's Code of Conduct? 
By filing an Issue, you are expected to comply with it, including treating everyone with respect: https://github.com/atom/atom/blob/master/CODE_OF_CONDUCT.md + +Do you want to ask a question? Are you looking for support? The Atom message board is the best place for getting support: https://discuss.atom.io + +--> + +### Prerequisites + +* [ ] Put an X between the brackets on this line if you have done all of the following: + * Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode + * Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/ + * Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq + * Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom + * Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages + +### Description + +[Description of the issue] + +### Steps to Reproduce + +1. [First Step] +2. [Second Step] +3. [and so on...] + +**Expected behavior:** [What you expect to happen] + +**Actual behavior:** [What actually happens] + +**Reproduces how often:** [What percentage of the time does it reproduce?] + +### Versions + +You can get this information from copy and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running. + +### Additional Information + +Any additional information, configuration or data that might be necessary to reproduce the issue. diff --git a/packages/language-html/LICENSE.md b/packages/language-html/LICENSE.md new file mode 100644 index 000000000..f53986684 --- /dev/null +++ b/packages/language-html/LICENSE.md @@ -0,0 +1,31 @@ +Copyright (c) 2014 GitHub Inc. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +-------------------------------------------------------------------- + +This package was derived from a TextMate bundle located at +https://github.com/textmate/html.tmbundle and distributed under the following +license, located in `README.mdown`: + +Permission to copy, use, modify, sell and distribute this +software is granted. This software is provided "as is" without +express or implied warranty, and with no claim as to its +suitability for any purpose. 
diff --git a/packages/language-html/PULL_REQUEST_TEMPLATE.md b/packages/language-html/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000..cdaa94a86 --- /dev/null +++ b/packages/language-html/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,28 @@ +### Requirements + +* Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion. +* All new code requires tests to ensure against regressions + +### Description of the Change + +<!-- + +We must be able to understand the design of your change from this description. If we can't get a good idea of what the code will be doing from the description here, the pull request may be closed at the maintainers' discretion. Keep in mind that the maintainer reviewing this PR may not be familiar with or have worked with the code here recently, so please walk us through the concepts. + +--> + +### Alternate Designs + +<!-- Explain what other alternates were considered and why the proposed version was selected --> + +### Benefits + +<!-- What benefits will be realized by the code change? --> + +### Possible Drawbacks + +<!-- What are the possible side-effects or negative impacts of the code change? --> + +### Applicable Issues + +<!-- Enter any applicable Issues here --> diff --git a/packages/language-html/README.md b/packages/language-html/README.md new file mode 100644 index 000000000..12480b97b --- /dev/null +++ b/packages/language-html/README.md @@ -0,0 +1,10 @@ +# HTML language support in Atom +![CI Status](https://github.com/atom/language-html/actions/workflows/main.yml/badge.svg) + +Adds syntax highlighting and snippets to HTML files in Atom. + +Originally [converted](http://flight-manual.atom.io/hacking-atom/sections/converting-from-textmate) +from the [HTML TextMate bundle](https://github.com/textmate/html.tmbundle). + +Contributions are greatly appreciated. Please fork this repository and open a +pull request to add snippets, make grammar tweaks, etc. 
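A quick way to sanity-check this part of the migration locally (a minimal sketch, not part of this patch): run the following from Atom's developer console after installing the build. It only assumes the migrated language-html package is present and uses the same Atom APIs that the package's own specs exercise (`atom.packages.activatePackage`, `atom.grammars.grammarForScopeName`).

    atom.packages.activatePackage('language-html').then(() => {
      // grammarForScopeName looks up the TextMate grammar this package registers
      const grammar = atom.grammars.grammarForScopeName('text.html.basic')
      console.log(grammar && grammar.name) // expected: 'HTML'
    })
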
diff --git a/packages/language-html/coffeelint.json b/packages/language-html/coffeelint.json new file mode 100644 index 000000000..a5dd715e3 --- /dev/null +++ b/packages/language-html/coffeelint.json @@ -0,0 +1,37 @@ +{ + "max_line_length": { + "level": "ignore" + }, + "no_empty_param_list": { + "level": "error" + }, + "arrow_spacing": { + "level": "error" + }, + "no_interpolation_in_single_quotes": { + "level": "error" + }, + "no_debugger": { + "level": "error" + }, + "prefer_english_operator": { + "level": "error" + }, + "colon_assignment_spacing": { + "spacing": { + "left": 0, + "right": 1 + }, + "level": "error" + }, + "braces_spacing": { + "spaces": 0, + "level": "error" + }, + "spacing_after_comma": { + "level": "error" + }, + "no_stand_alone_at": { + "level": "error" + } +} diff --git a/packages/language-html/grammars/html.cson b/packages/language-html/grammars/html.cson new file mode 100644 index 000000000..b8ab210a5 --- /dev/null +++ b/packages/language-html/grammars/html.cson @@ -0,0 +1,779 @@ +'scopeName': 'text.html.basic' +'fileTypes': [ + 'htm' + 'html' + 'kit' + 'shtml' + 'tmpl' + 'tpl' + 'xhtml' +] +'firstLineMatch': '''(?xi) + # Document type definition + <(?:!DOCTYPE\\s*)?html + | + # Emacs modeline + -\\*-(?:\\s*(?=[^:;\\s]+\\s*-\\*-)|(?:.*?[;\\s]|(?<=-\\*-))mode\\s*:\\s*) + html + (?=[\\s;]|(?<![-*])-\\*-).*?-\\*- + | + # Vim modeline + (?:(?:\\s|^)vi(?:m[<=>]?\\d+|m)?|\\sex)(?=:(?=\\s*set?\\s[^\\n:]+:)|:(?!\\s*set?\\s))(?:(?:\\s|\\s*:\\s*)\\w*(?:\\s*=(?:[^\\n\\\\\\s]|\\\\.)*)?)*[\\s:](?:filetype|ft|syntax)\\s*= + x?html + (?=\\s|:|$) +''' +'name': 'HTML' +'patterns': [ + { + 'begin': '(<\\?)(xml)' + 'captures': + '1': + 'name': 'punctuation.definition.tag.html' + '2': + 'name': 'entity.name.tag.xml.html' + 'end': '(\\?>)' + 'name': 'meta.tag.preprocessor.xml.html' + 'patterns': [ + { + 'include': '#tag-generic-attribute' + } + { + 'include': '#string-double-quoted' + } + { + 'include': '#string-single-quoted' + } + ] + } + { + 'begin': '<!--' + 'captures': + '0': + 'name': 'punctuation.definition.comment.html' + 'end': '--\\s*>' + 'name': 'comment.block.html' + 'patterns': [ + { + 'match': '--(?!-*\\s*>)' + 'name': 'invalid.illegal.bad-comments-or-CDATA.html' + } + { + 'include': '#embedded-code' + } + ] + } + { + 'begin': '<!' 
+ 'captures': + '0': + 'name': 'punctuation.definition.tag.html' + 'end': '>' + 'name': 'meta.tag.sgml.html' + 'patterns': [ + { + 'begin': '(?i:DOCTYPE)' + 'captures': + '1': + 'name': 'entity.name.tag.doctype.html' + 'end': '(?=>)' + 'name': 'meta.tag.sgml.doctype.html' + 'patterns': [ + { + 'match': '"[^">]*"' + 'name': 'string.quoted.double.doctype.identifiers-and-DTDs.html' + } + ] + } + { + 'begin': '\\[CDATA\\[' + 'end': ']](?=>)' + 'name': 'constant.other.inline-data.html' + } + { + 'match': '(\\s*)(?!--|>)\\S(\\s*)' + 'name': 'invalid.illegal.bad-comments-or-CDATA.html' + } + ] + } + { + 'include': '#embedded-code' + } + { + 'begin': '(?i)(?=<style(\\s+|>))' + 'end': '(?i)(</)(style)(>)' + 'endCaptures': + '1': + 'name': 'punctuation.definition.tag.html' + '2': + 'name': 'entity.name.tag.style.html' + '3': + 'name': 'punctuation.definition.tag.html' + 'name': 'meta.tag.style.html' + 'patterns': [ + { + 'begin': '(?i)\\G(<)(style)' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.tag.html' + '2': + 'name': 'entity.name.tag.style.html' + 'end': '>' + 'endCaptures': + '0': + 'name': 'punctuation.definition.tag.html' + 'patterns': [ + { + 'include': '#tag-stuff' + } + ] + } + { + 'begin': '(?!\\G)' + 'end': '(?i)(?=</style>)' + 'name': 'source.css.embedded.html' + 'patterns': [ + { + 'include': '#embedded-code' + } + { + 'include': 'source.css' + } + ] + } + ] + } + { + 'begin': '(?i)(?=<script\\s+.*?\\btype\\s*=\\s*[\'"]?text/(?:x-handlebars|(?:x-(?:handlebars-)?|ng-)?template|html|ractive)[\'"]?(\\s+|>))' + 'end': '(</)((?i)script)(>)' + 'endCaptures': + '1': + 'name': 'punctuation.definition.tag.html' + '2': + 'name': 'entity.name.tag.script.html' + '3': + 'name': 'punctuation.definition.tag.html' + 'name': 'meta.tag.script.html' + 'patterns': [ + { + 'begin': '(?i)\\G(<)(script)' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.tag.html' + '2': + 'name': 'entity.name.tag.script.html' + 'end': '>' + 'endCaptures': + '0': + 'name': 'punctuation.definition.tag.html' + 'patterns': [ + { + 'include': '#tag-stuff' + } + ] + } + { + 'begin': '(?!\\G)' + 'end': '(?i)(?=</script>)' + 'name': 'text.embedded.html' + 'patterns': [ + { + 'include': 'text.html.basic' + } + ] + } + ] + } + { + 'begin': '(?i)(?=<script\\s+.*?\\btype\\s*=\\s*[\'"]?text/coffeescript[\'"]?(\\s+|>))' + 'end': '(</)((?i)script)(>)' + 'endCaptures': + '1': + 'name': 'punctuation.definition.tag.html' + '2': + 'name': 'entity.name.tag.script.html' + '3': + 'name': 'punctuation.definition.tag.html' + 'name': 'meta.tag.script.html' + 'patterns': [ + { + 'begin': '(?i)\\G(<)(script)' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.tag.html' + '2': + 'name': 'entity.name.tag.script.html' + 'end': '>' + 'endCaptures': + '0': + 'name': 'punctuation.definition.tag.html' + 'patterns': [ + { + 'include': '#tag-stuff' + } + ] + } + { + 'begin': '(?!\\G)' + 'end': '(?i)(?=</script>)' + 'name': 'source.coffee.embedded.html' + 'patterns': [ + { + 'begin': '###' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.comment.coffee' + 'end': '###|(?=(?i)</script>)' + 'endCaptures': + '0': + 'name': 'punctuation.definition.comment.coffee' + 'name': 'comment.block.coffee' + } + { + 'begin': '#' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.comment.coffee' + 'end': '(?=(?i)</script>|$)' + 'name': 'comment.line.number-sign.coffee' + } + { + 'include': 'source.coffee' + } + ] + } + ] + } + { + 'begin': '(?i)(?=<script\\s+.*?\\btype\\s*=\\s*[\'"]?application/graphql[\'"]?(\\s+|>))' + 
'end': '(</)((?i)script)(>)' + 'endCaptures': + '1': + 'name': 'punctuation.definition.tag.html' + '2': + 'name': 'entity.name.tag.script.html' + '3': + 'name': 'punctuation.definition.tag.html' + 'name': 'meta.tag.script.html' + 'patterns': [ + { + 'begin': '(?i)\\G(<)(script)' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.tag.html' + '2': + 'name': 'entity.name.tag.script.html' + 'end': '>' + 'endCaptures': + '0': + 'name': 'punctuation.definition.tag.html' + 'patterns': [ + { + 'include': '#tag-stuff' + } + ] + } + { + 'begin': '(?!\\G)' + 'end': '(?i)(?=</script>)' + 'name': 'source.graphql.embedded.html' + 'patterns': [ + { + 'begin': '#' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.comment.graphql' + 'end': '(?=(?i)</script>|$)' + 'name': 'comment.line.number-sign.graphql' + } + { + 'include': 'source.graphql' + } + ] + } + ] + } + { + 'begin': '(?i)(?=<script(\\s+|>))' + 'end': '(</)((?i)script)(>)' + 'endCaptures': + '1': + 'name': 'punctuation.definition.tag.html' + '2': + 'name': 'entity.name.tag.script.html' + '3': + 'name': 'punctuation.definition.tag.html' + 'name': 'meta.tag.script.html' + 'patterns': [ + { + 'begin': '(?i)\\G(<)(script)' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.tag.html' + '2': + 'name': 'entity.name.tag.script.html' + 'end': '>' + 'endCaptures': + '0': + 'name': 'punctuation.definition.tag.html' + 'patterns': [ + { + 'include': '#tag-stuff' + } + ] + } + { + 'begin': '(?!\\G)' + 'end': '(?i)(?=</script>)' + 'name': 'source.js.embedded.html' + 'patterns': [ + { + 'begin': '//' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.comment.js' + 'end': '(?=(?i)</script>|$)' + 'name': 'comment.line.double-slash.js' + } + { + 'begin': '/\\*' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.comment.begin.js' + 'end': '\\*/|(?=(?i)</script>)' + 'endCaptures': + '0': + 'name': 'punctuation.definition.comment.begin.js' + 'name': 'comment.block.js' + } + { + 'include': 'source.js' + } + ] + } + ] + } + { + 'begin': '(?i)(</?)(body|head|html)(?=\\s|/?>)' + 'captures': + '1': + 'name': 'punctuation.definition.tag.html' + '2': + 'name': 'entity.name.tag.structure.$2.html' + 'end': '(>)' + 'name': 'meta.tag.structure.$2.html' + 'patterns': [ + { + 'include': '#tag-stuff' + } + ] + } + { + 'begin': '(?i)(</?)(address|blockquote|dd|div|section|article|aside|header|footer|nav|menu|dl|dt|fieldset|form|frame|frameset|h1|h2|h3|h4|h5|h6|iframe|noframes|object|ol|p|ul|applet|center|dir|hr|pre)(?=\\s|/?>)' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.tag.begin.html' + '2': + 'name': 'entity.name.tag.block.$2.html' + 'end': '(>)' + 'endCaptures': + '1': + 'name': 'punctuation.definition.tag.end.html' + 'name': 'meta.tag.block.$2.html' + 'patterns': [ + { + 'include': '#tag-stuff' + } + ] + } + { + 'begin': '(?i)(</?)(a|abbr|acronym|area|b|base|basefont|bdo|big|br|button|caption|cite|code|col|colgroup|del|dfn|em|font|head|html|i|img|input|ins|isindex|kbd|label|legend|li|link|map|meta|noscript|optgroup|option|param|q|s|samp|script|select|small|span|strike|strong|style|sub|sup|table|tbody|td|textarea|tfoot|th|thead|title|tr|tt|u|var)(?=\\s|/?>)' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.tag.begin.html' + '2': + 'name': 'entity.name.tag.inline.$2.html' + 'end': '((?: ?/)?>)' + 'endCaptures': + '1': + 'name': 'punctuation.definition.tag.end.html' + 'name': 'meta.tag.inline.$2.html' + 'patterns': [ + { + 'include': '#tag-stuff' + } + ] + } + { + 'begin': '(</?)([a-zA-Z0-9:-]+)' + 
'beginCaptures': + '1': + 'name': 'punctuation.definition.tag.begin.html' + '2': + 'name': 'entity.name.tag.other.html' + 'end': '(>)' + 'endCaptures': + '1': + 'name': 'punctuation.definition.tag.end.html' + 'name': 'meta.tag.other.html' + 'patterns': [ + { + 'include': '#tag-stuff' + } + ] + } + { + 'include': '#character-reference' + } + { + 'match': '<>' + 'name': 'invalid.illegal.incomplete.html' + } +] +'repository': + 'embedded-code': + 'patterns': [ + { + 'include': '#smarty' + } + { + 'include': '#python' + } + ] + 'character-reference': + # https://html.spec.whatwg.org/multipage/parsing.html#character-reference-state + # We're not fully compliant with the spec (we don't catch missing semicolons or invalid references) + # but that is mostly to prevent tokenizing ambiguous ampersands as errors. + # That could be added in the future though if we add the list of all valid character references, + # as language-css does with property names. + 'patterns': [ + { + 'begin': '(&)(#\\d+|#[xX][0-9a-fA-F]+)' + 'beginCaptures': + '1': + 'name': 'punctuation.definition.entity.begin.html' + '2': + 'name': 'entity.name.entity.other.html' + 'end': ';' + 'endCaptures': + '0': + 'name': 'punctuation.definition.entity.end.html' + 'name': 'constant.character.entity.html' + } + { + 'match': '(&)([a-zA-Z0-9]+)(;)' + 'name': 'constant.character.entity.html' + 'captures': + '1': + 'name': 'punctuation.definition.entity.begin.html' + '2': + 'name': 'entity.name.entity.other.html' + '3': + 'name': 'punctuation.definition.entity.end.html' + } + { + 'match': '&(?!\\s|<|&|[a-zA-Z0-9])' + 'name': 'invalid.illegal.bad-ampersand.html' + } + ] + 'python': + 'begin': '(?:^\\s*)<\\?python(?!.*\\?>)' + 'end': '\\?>(?:\\s*$\\n)?' + 'name': 'source.python.embedded.html' + 'patterns': [ + { + 'include': 'source.python' + } + ] + 'smarty': + 'patterns': [ + { + 'begin': '(\\{(literal)\\})' + 'captures': + '1': + 'name': 'source.smarty.embedded.html' + '2': + 'name': 'support.function.built-in.smarty' + 'end': '(\\{/(literal)\\})' + } + { + 'begin': '{{|{' + 'disabled': 1 + 'end': '}}|}' + 'name': 'source.smarty.embedded.html' + 'patterns': [ + { + 'include': 'source.smarty' + } + ] + } + ] + 'string-double-quoted': + 'begin': '"' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.html' + 'end': '"' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.html' + 'name': 'string.quoted.double.html' + 'patterns': [ + { + 'include': '#embedded-code' + } + { + 'include': '#character-reference' + } + ] + 'string-single-quoted': + 'begin': '\'' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.html' + 'end': '\'' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.html' + 'name': 'string.quoted.single.html' + 'patterns': [ + { + 'include': '#embedded-code' + } + { + 'include': '#character-reference' + } + ] + 'tag-generic-attribute': + # https://www.w3.org/TR/html51/syntax.html#attribute-name-state + 'patterns': [ + { + 'begin': '([^\\s/=>"\'<]+)\\s*(=)\\s*' + 'beginCaptures': + '1': + 'name': 'entity.other.attribute-name.html' + '2': + 'name': 'punctuation.separator.key-value.html' + 'end': '(?!\\G)|(?=\\s|/?>)' + 'name': 'meta.attribute-with-value.html' + 'patterns': [ + { + 'include': '#string-double-quoted' + } + { + 'include': '#string-single-quoted' + } + { + 'include': '#unquoted-attribute' + } + ] + } + { + 'match': '[^\\s/=>"\'<]+' + 'captures': + '0': + 'name': 'entity.other.attribute-name.html' + 'name': 'meta.attribute-without-value.html' + } 
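+      # Illustrative note (editorial, not in the upstream grammar file): with the two
+      # rules above, `<div data-id="5" hidden>` scopes `data-id` inside
+      # meta.attribute-with-value.html and `hidden` inside meta.attribute-without-value.html.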
+ ] + 'tag-style-attribute': + 'begin': '\\b(style)\\s*(=)\\s*' + 'beginCaptures': + '1': + 'name': 'entity.other.attribute-name.style.html' + '2': + 'name': 'punctuation.separator.key-value.html' + 'end': '(?!\\G)|(?=\\s|/?>)' + 'name': 'meta.attribute-with-value.style.html' + 'patterns': [ + { + 'begin': '"' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.html' + 'end': '"' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.html' + 'name': 'string.quoted.double.html' + 'contentName': 'source.css.style.html' + 'patterns': [ + { + 'match': '[^"]+' + 'name': 'meta.property-list.css' + 'captures': + '0': + 'patterns': [ + { + 'include': '#embedded-code' + } + { + 'include': '#entities' + } + { + 'include': 'source.css#rule-list-innards' + } + ] + } + ] + } + { + 'begin': "'" + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.html' + 'end': "'" + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.html' + 'name': 'string.quoted.single.html' + 'contentName': 'source.css.style.html' + 'patterns': [ + { + 'match': "[^']+" + 'name': 'meta.property-list.css' + 'captures': + '0': + 'patterns': [ + { + 'include': '#embedded-code' + } + { + 'include': '#entities' + } + { + 'include': 'source.css#rule-list-innards' + } + ] + } + ] + } + { + 'match': '([^\\s&>"\'<=`]|&(?=>))+' + 'name': 'string.unquoted.html' + 'captures': + '0': + 'name': 'source.css.style.html' + 'patterns': [ + { + 'match': '.+' + 'name': 'meta.property-list.css' + 'captures': + '0': + 'patterns': [ + { + 'include': 'source.css#rule-list-innards' + } + ] + } + ] + } + ] + 'tag-id-attribute': + 'begin': '\\b(id)\\s*(=)\\s*' + 'captures': + '1': + 'name': 'entity.other.attribute-name.id.html' + '2': + 'name': 'punctuation.separator.key-value.html' + 'end': '(?!\\G)|(?=\\s|/?>)' + 'name': 'meta.attribute-with-value.id.html' + 'patterns': [ + { + 'begin': '"' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.html' + 'contentName': 'meta.toc-list.id.html' + 'end': '"' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.html' + 'name': 'string.quoted.double.html' + 'patterns': [ + { + 'include': '#embedded-code' + } + { + 'include': '#character-reference' + } + ] + } + { + 'begin': '\'' + 'beginCaptures': + '0': + 'name': 'punctuation.definition.string.begin.html' + 'contentName': 'meta.toc-list.id.html' + 'end': '\'' + 'endCaptures': + '0': + 'name': 'punctuation.definition.string.end.html' + 'name': 'string.quoted.single.html' + 'patterns': [ + { + 'include': '#embedded-code' + } + { + 'include': '#character-reference' + } + ] + } + { + 'include': '#unquoted-attribute' + } + ] + 'tag-class-attribute': + 'begin': '\\b(class)\\s*(=)\\s*' + 'captures': + '1': + 'name': 'entity.other.attribute-name.class.html' + '2': + 'name': 'punctuation.separator.key-value.html' + 'end': '(?!\\G)|(?=\\s|/?>)' + 'name': 'meta.attribute-with-value.class.html' + 'patterns': [ + { + 'include': '#string-double-quoted' + } + { + 'include': '#string-single-quoted' + } + { + 'include': '#unquoted-attribute' + } + ] + 'tag-stuff': + 'patterns': [ + { + 'include': '#tag-id-attribute' + } + { + 'include': '#tag-class-attribute' + } + { + 'include': '#tag-style-attribute' + } + { + 'include': '#tag-generic-attribute' + } + { + 'include': '#string-double-quoted' + } + { + 'include': '#string-single-quoted' + } + { + 'include': '#embedded-code' + } + ] + 'unquoted-attribute': + 'patterns': [ + { + 'include': '#character-reference' + } + { + # 
https://www.w3.org/TR/html51/syntax.html#attribute-value-unquoted-state + 'match': '([^\\s&>"\'<=`]|&(?=>))+' + 'name': 'string.unquoted.html' + } + ] diff --git a/packages/language-html/grammars/tree-sitter-ejs.cson b/packages/language-html/grammars/tree-sitter-ejs.cson new file mode 100644 index 000000000..4af198340 --- /dev/null +++ b/packages/language-html/grammars/tree-sitter-ejs.cson @@ -0,0 +1,36 @@ +name: 'EJS' +scopeName: 'text.html.ejs' +type: 'tree-sitter' +parser: 'tree-sitter-embedded-template' + +fileTypes: [ + 'ejs' + 'html.ejs' +] + +injectionRegExp: '^(ejs|EJS)$' + +folds: [ + { + type: ['directive', 'output_directive'], + start: {index: 0}, + end: {index: -1} + } +] + +comments: + start: '<%#' + end: '%>' + +scopes: + 'comment_directive': 'comment.block' + 'comment_directive > "%>"': 'comment.block' + + '"<%#"': 'keyword.control.directive' + '"<%"': 'keyword.control.directive' + '"<%="': 'keyword.control.directive' + '"<%_"': 'keyword.control.directive' + '"<%-"': 'keyword.control.directive' + '"%>"': 'keyword.control.directive' + '"-%>"': 'keyword.control.directive' + '"_%>"': 'keyword.control.directive' diff --git a/packages/language-html/grammars/tree-sitter-erb.cson b/packages/language-html/grammars/tree-sitter-erb.cson new file mode 100644 index 000000000..c754cae34 --- /dev/null +++ b/packages/language-html/grammars/tree-sitter-erb.cson @@ -0,0 +1,36 @@ +name: 'ERB' +scopeName: 'text.html.erb' +type: 'tree-sitter' +parser: 'tree-sitter-embedded-template' + +fileTypes: [ + 'erb' + 'html.erb' +] + +injectionRegExp: '^(erb|ERB)$' + +folds: [ + { + type: ['directive', 'output_directive'], + start: {index: 0}, + end: {index: -1} + } +] + +comments: + start: '<%#' + end: '%>' + +scopes: + 'comment_directive': 'comment.block' + 'comment_directive > "%>"': 'comment.block' + + '"<%#"': 'keyword.control.directive' + '"<%"': 'keyword.control.directive' + '"<%="': 'keyword.control.directive' + '"<%_"': 'keyword.control.directive' + '"<%-"': 'keyword.control.directive' + '"%>"': 'keyword.control.directive' + '"-%>"': 'keyword.control.directive' + '"_%>"': 'keyword.control.directive' diff --git a/packages/language-html/grammars/tree-sitter-html.cson b/packages/language-html/grammars/tree-sitter-html.cson new file mode 100644 index 000000000..1a0b42263 --- /dev/null +++ b/packages/language-html/grammars/tree-sitter-html.cson @@ -0,0 +1,56 @@ +name: 'HTML' +scopeName: 'text.html.basic' +type: 'tree-sitter' +parser: 'tree-sitter-html' + +fileTypes: [ + 'html' +] + +injectionRegExp: '(HTML|html|Html)$' + +folds: [ + { + type: ['start_tag', 'raw_start_tag', 'self_closing_tag'], + start: {index: 1}, + end: {index: -1} + } + { + type: ['element', 'raw_element'], + start: {index: 0}, + end: {index: -1} + } +] + +comments: + start: '<!--' + end: '-->' + +scopes: + 'fragment': 'source.html' + 'tag_name': 'entity.name.tag' + 'erroneous_end_tag_name': 'invalid.illegal' + 'doctype': 'meta.tag.doctype.html' + 'attribute_name': 'entity.other.attribute-name' + 'attribute_value': 'string.html' + 'comment': 'comment.block.html' + + ' + start_tag > "<", + end_tag > "</" + ': 'punctuation.definition.tag.begin' + ' + start_tag > ">", + end_tag > ">" + ': 'punctuation.definition.tag.end' + + 'attribute > "="': 'punctuation.separator.key-value.html' + + # quoted_attribute_value has three child nodes: ", attribute_value, and ". + # Target the first and last. + # Single quotes and double quotes are targeted in separate selectors because + # of quote-escaping difficulties. 
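+  # For example (editorial note, not in the upstream file): in `href="x"` the
+  # quoted_attribute_value node's children are `"`, attribute_value, `"`, so
+  # :nth-child(0) below is the opening quote and :nth-child(2) the closing quote.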
+ "quoted_attribute_value > '\"':nth-child(0)": 'punctuation.definition.string.begin' + 'quoted_attribute_value > "\'":nth-child(0)': 'punctuation.definition.string.begin' + "quoted_attribute_value > '\"':nth-child(2)": 'punctuation.definition.string.end' + 'quoted_attribute_value > "\'":nth-child(2)': 'punctuation.definition.string.end' diff --git a/packages/language-html/lib/main.js b/packages/language-html/lib/main.js new file mode 100644 index 000000000..7c746f381 --- /dev/null +++ b/packages/language-html/lib/main.js @@ -0,0 +1,39 @@ +exports.activate = function () { + atom.grammars.addInjectionPoint('text.html.basic', { + type: 'script_element', + language () { return 'javascript' }, + content (node) { return node.child(1) } + }) + + atom.grammars.addInjectionPoint('text.html.basic', { + type: 'style_element', + language () { return 'css' }, + content (node) { return node.child(1) } + }) + + atom.grammars.addInjectionPoint('text.html.ejs', { + type: 'template', + language (node) { return 'javascript' }, + content (node) { return node.descendantsOfType('code') }, + newlinesBetween: true + }) + + atom.grammars.addInjectionPoint('text.html.ejs', { + type: 'template', + language (node) { return 'html' }, + content (node) { return node.descendantsOfType('content') } + }) + + atom.grammars.addInjectionPoint('text.html.erb', { + type: 'template', + language (node) { return 'ruby' }, + content (node) { return node.descendantsOfType('code') }, + newlinesBetween: true + }) + + atom.grammars.addInjectionPoint('text.html.erb', { + type: 'template', + language (node) { return 'html' }, + content (node) { return node.descendantsOfType('content') } + }) +} diff --git a/packages/language-html/package-lock.json b/packages/language-html/package-lock.json new file mode 100644 index 000000000..5270a652f --- /dev/null +++ b/packages/language-html/package-lock.json @@ -0,0 +1,202 @@ +{ + "name": "language-html", + "version": "0.53.1", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "atom-grammar-test": { + "version": "0.6.4", + "resolved": "https://registry.npmjs.org/atom-grammar-test/-/atom-grammar-test-0.6.4.tgz", + "integrity": "sha1-2KU1A9H+k5mX9Ji3SirDEARKfU4=", + "requires": { + "chevrotain": "^0.18.0", + "escape-string-regexp": "^1.0.5" + } + }, + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "chevrotain": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-0.18.0.tgz", + "integrity": "sha1-sodxTjFZC64sXR4vYRZz7+xHnYA=" + }, + "coffee-script": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.11.1.tgz", + "integrity": "sha1-vxxHrWREOg2V0S3ysUfMCk2q1uk=", + "dev": true + }, + "coffeelint": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/coffeelint/-/coffeelint-1.16.2.tgz", + "integrity": "sha512-6mzgOo4zb17WfdrSui/cSUEgQ0AQkW3gXDht+6lHkfkqGUtSYKwGdGcXsDfAyuScVzTlTtKdfwkAlJWfqul7zg==", + "dev": true, + "requires": { + "coffee-script": "~1.11.0", + "glob": "^7.0.6", + "ignore": "^3.0.9", + 
"optimist": "^0.6.1", + "resolve": "^0.6.3", + "strip-json-comments": "^1.0.2" + } + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "dedent": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz", + "integrity": "sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw=", + "dev": true + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "ignore": { + "version": "3.3.10", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", + "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", + "integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=", + "dev": true + }, + "nan": { + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", + "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "optimist": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", + "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", + "dev": true, + "requires": { + "minimist": "~0.0.1", + "wordwrap": "~0.0.2" + } + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "resolve": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-0.6.3.tgz", + "integrity": "sha1-3ZV5gufnNt699TtYpN2RdUV13UY=", + "dev": true + }, + "strip-json-comments": { + 
"version": "1.0.4", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-1.0.4.tgz", + "integrity": "sha1-HhX7ysl9Pumb8tc7TGVrCCu6+5E=", + "dev": true + }, + "tree-sitter-embedded-template": { + "version": "0.15.2", + "resolved": "https://registry.npmjs.org/tree-sitter-embedded-template/-/tree-sitter-embedded-template-0.15.2.tgz", + "integrity": "sha512-DHT4KAzFbtj/5XhSZWCkf7LP1SYIi4gSSOZHVH9SqEv1DIc9rSmgNO3rzIyMoRclusLoL2susmokL/hYkj56+A==", + "requires": { + "nan": "^2.0.0" + } + }, + "tree-sitter-html": { + "version": "0.15.1", + "resolved": "https://registry.npmjs.org/tree-sitter-html/-/tree-sitter-html-0.15.1.tgz", + "integrity": "sha512-hf1dTxB6OIlDk+mkxLaHC1boKUxmnp0qxt8nApZZ6zf3VWj5FeYlWv93GX5+gLL3NkLAjaUKGZGFXpR+wE97Jg==", + "requires": { + "nan": "^2.10.0" + } + }, + "wordwrap": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", + "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=", + "dev": true + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + } + } +} diff --git a/packages/language-html/package.json b/packages/language-html/package.json new file mode 100644 index 000000000..789924c46 --- /dev/null +++ b/packages/language-html/package.json @@ -0,0 +1,31 @@ +{ + "name": "language-html", + "main": "lib/main", + "version": "0.53.1", + "description": "HTML language support in Atom", + "keywords": [ + "tree-sitter" + ], + "engines": { + "atom": "*", + "node": "*" + }, + "homepage": "http://atom.github.io/language-html", + "repository": { + "type": "git", + "url": "https://github.com/atom/language-html.git" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/atom/language-html/issues" + }, + "dependencies": { + "atom-grammar-test": "^0.6.3", + "tree-sitter-embedded-template": "^0.15.2", + "tree-sitter-html": "^0.15.0" + }, + "devDependencies": { + "coffeelint": "^1.10.1", + "dedent": "^0.7.0" + } +} diff --git a/packages/language-html/settings/language-html.cson b/packages/language-html/settings/language-html.cson new file mode 100644 index 000000000..1b46997d5 --- /dev/null +++ b/packages/language-html/settings/language-html.cson @@ -0,0 +1,23 @@ +'.text.html': + 'autocomplete': + 'extraWordCharacters': '-' + 'editor': + 'commentStart': '<!-- ' + 'commentEnd': ' -->' + 'foldEndPattern': '(?x)\n\t\t(</(?i:head|body|table|thead|tbody|tfoot|tr|div|select|fieldset|style|script|ul|ol|li|form|dl|section|article|header|footer|nav|aside)>\n\t\t|^(?!.*?<!--).*?--\\s*>\n\t\t|^<!--\\ end\\ tminclude\\ -->$\n\t\t|<\\?(?:php)?.*\\bend(if|for(each)?|while)\\b\n\t\t|\\{\\{?/(if|foreach|capture|literal|foreach|php|section|strip)\n\t\t|^[^{]*\\}\n\t\t|^\\s*\\)[,;]\n\t\t)' + 'increaseIndentPattern': '''(?x) + <(?!\\?|(?:area|base|br|col|frame|hr|html|img|input|link|meta|param)\\b|[^>]*/>) + ([-_\\.A-Za-z0-9]+)(?=\\s|>)\\b[^>]*>(?!.*</\\1>) + |<!--(?!.*-->) + |<\\?php.+?\\b(if|else(?:if)?|for(?:each)?|while)\\b.*:(?!.*end\\2) + |\\{[^}"\']*$ + ''' + 'decreaseIndentPattern': '''(?x) + ^\\s* + (</(?!html) + [-_\\.A-Za-z0-9]+\\b[^>]*> + |--> + |<\\?(php)?\\s+(else(if)?|end(if|for(each)?|while)|\\}) + |\\} + ) + ''' diff --git a/packages/language-html/snippets/language-html.cson b/packages/language-html/snippets/language-html.cson new file mode 100644 index 000000000..d5ea04303 --- /dev/null +++ b/packages/language-html/snippets/language-html.cson @@ -0,0 +1,667 @@ +# Important: When adding 
a new snippet, +# there is a second section halfway down this file +# where you need to null-out the snippet to prevent it from +# appearing in tags or embedded contexts +'.text.html': + # A + 'Anchor': + 'prefix': 'a' + 'body': '<a href="${1:#}">$2</a>$0' + 'Abbreviation': + 'prefix': 'abbr' + 'body': '<abbr title="$1">$2</abbr>$0' + 'Address': + 'prefix': 'address' + 'body': '<address${1: class="$2"}>\n\t$3\n</address>' + 'Area': + 'prefix': 'area' + 'body': '<area${1: shape="${2:default}"} coords="$3"${4: href="${5:#}" alt="$6"}>$0' + 'Article': + 'prefix': 'article' + 'body': '<article class="$1">\n\t$2\n</article>' + 'Aside': + 'prefix': 'aside' + 'body': '<aside class="$1">\n\t$2\n</aside>' + 'Audio': + 'prefix': 'audio' + 'body': '<audio src="$1">\n\t$2\n</audio>' + # B + 'Body': + 'prefix': 'body' + 'body': '<body>\n\t$1\n</body>' + 'Bold': + 'prefix': 'b' + 'body': '<b>$1</b>$0' + 'Base': + 'prefix': 'base' + 'body': '<base href="${1:#}" target="${2:_blank}">$0' + 'Bi-Directional Isolation': + 'prefix': 'bdi' + 'body': '<bdi${1: dir="${2:rtl}"}>$3</bdi>$0' + 'Bi-Directional Override': + 'prefix': 'bdo' + 'body': '<bdo dir="${1:rtl}">$2</bdo>$0' + 'Blockquote': + 'prefix': 'blockquote' + 'body': '<blockquote cite="${1:http://}">\n\t$2\n</blockquote>' + 'Line Breaker': + 'prefix': 'br' + 'body': '<br>' + 'Button': + 'prefix': 'button' + 'body': '<button type="${1:button}"${2: name="${3:button}"}>$4</button>$0' + # C + 'Canvas': + 'prefix': 'canvas' + 'body': '<canvas id="${1:canvas}" width="${2:300}" height="${3:300}">$4</canvas>$0' + 'Caption': + 'prefix': 'caption' + 'body': '<caption>$1</caption>$0' + 'Citation': + 'prefix': 'cite' + 'body': '<cite>$1</cite>$0' + 'Code': + 'prefix': 'code' + 'body': '<code>$1</code>$0' + 'Column': + 'prefix': 'col' + 'body': '<col${1: span="${2:2}"}>$0' + 'Column Group': + 'prefix': 'colgroup' + 'body': '<colgroup>$1</colgroup>$0' + 'Comment': + 'prefix': '--' + 'body': '<!-- $1 -->$0' + # D + 'HTML — 5': + 'prefix': 'doctype' + 'body': '<!DOCTYPE html>\n' + 'Data': + 'prefix': 'data' + 'body': '<data value="$1">$2</data>$0' + 'Data List': + 'prefix': 'datalist' + 'body': '<datalist${1: class="$2"}>\n\t$3\n</datalist>' + 'Description': + 'prefix': 'dd' + 'body': '<dd>$1</dd>$0' + 'Deleted Text': + 'prefix': 'del' + 'body': '<del>$1</del>$0' + 'Details': + 'prefix': 'details' + 'body': '<details${1: open}>\n\t$2\n</details>' + 'Definition': + 'prefix': 'dfn' + 'body': '<dfn>$1</dfn>$0' + 'Description List': + 'prefix': 'dl' + 'body': '<dl${1: class="$2"}>\n\t$3\n</dl>' + 'Definition Term': + 'prefix': 'dt' + 'body': '<dt>$1</dt>$0' + 'Div': + 'prefix': 'div' + 'body': '<div class="$1">\n\t$2\n</div>' + # E + 'Emphasis': + 'prefix': 'em' + 'body': '<em>$1</em>$0' + 'Embed': + 'prefix': 'embed' + 'body': '<embed type="${1:video/quicktime}" src="${2:#}" width="${3:300}" height="${4:300}">$0' + # F + 'Favicon': + 'prefix': 'favicon' + 'body': '<link rel="shortcut icon" href="$1.ico">$0' + 'Fieldset': + 'prefix': 'fieldset' + 'body': '<fieldset>\n\t$1\n</fieldset>' + 'Figure Caption': + 'prefix': 'figcaption' + 'body': '<figcaption>$1</figcaption>$0' + 'Figure': + 'prefix': 'figure' + 'body': '<figure>\n\t$1\n</figure>' + 'Footer': + 'prefix': 'footer' + 'body': '<footer>$1</footer>$0' + 'Form': + 'prefix': 'form' + 'body': '<form class="$1" action="${2:index.html}" method="${3:post}">\n\t$4\n</form>' + # G + # H + 'Heading 1': + 'prefix': 'h1' + 'body': '<h1>$1</h1>$0' + 'Heading 2': + 'prefix': 'h2' + 'body': '<h2>$1</h2>$0' + 'Heading 3': + 
'prefix': 'h3' + 'body': '<h3>$1</h3>$0' + 'Heading 4': + 'prefix': 'h4' + 'body': '<h4>$1</h4>$0' + 'Heading 5': + 'prefix': 'h5' + 'body': '<h5>$1</h5>$0' + 'Heading 6': + 'prefix': 'h6' + 'body': '<h6>$1</h6>$0' + 'Head': + 'prefix': 'head' + 'body': '<head>\n\t$1\n</head>' + 'Header': + 'prefix': 'header' + 'body': '<header>\n\t$1\n</header>' + 'Heading Group': + 'prefix': 'hgroup' + 'body': '<hgroup>\n\t$1\n</hgroup>' + 'Horizontal Rule': + 'prefix': 'hr' + 'body': '<hr>' + 'HTML': + 'prefix': 'html' + 'body': '<!DOCTYPE html>\n<html lang="${1:en}" dir="${2:ltr}">\n\t<head>\n\t\t<meta charset="utf-8">\n\t\t<title>$3\n\t\n\t\n\t\t$4\n\t\n' + # I + 'Italic': + 'prefix': 'i' + 'body': '$1$0' + 'Inline Frame': + 'prefix': 'iframe' + 'body': '$0' + 'Image': + 'prefix': 'img' + 'body': '$2$0' + 'Input': + 'prefix': 'input' + 'body': '$0' + 'Import': + 'prefix': 'import' + 'body': '$0' + 'Inserted Text': + 'prefix': 'ins' + 'body': '$1$0' + # J + # K + 'Keyboard Input': + 'prefix': 'kbd' + 'body': '$1$0' + 'Keygen': + 'prefix': 'keygen' + 'body': '$0' + # L + 'Label': + 'prefix': 'label' + 'body': '$3$0' + 'Legend': + 'prefix': 'legend' + 'body': '$1$0' + 'List Item': + 'prefix': 'li' + 'body': '
  • $1
  • $0' + 'Link': + 'prefix': 'link' + 'body': '$0' + # M + 'Main': + 'prefix': 'main' + 'body': '
    \n\t$1\n
    ' + 'Map': + 'prefix': 'map' + 'body': '\n\t$1\n' + 'Mark': + 'prefix': 'mark' + 'body': '$1$0' + 'MathML': + 'prefix': 'math' + 'body': '\n\t$1\n' + 'Menu': + 'prefix': 'menu' + 'body': '\n\t$1\n' + 'Menu Item': + 'prefix': 'menuitem' + 'body': '$0' + 'Meter': + 'prefix': 'meter' + 'body': '$5$0' + 'Mail Anchor': + 'prefix': 'mailto' + 'body': '${3:email me}$0' + 'Meta': + 'prefix': 'meta' + 'body': '$0' + # N + 'Navigation': + 'prefix': 'nav' + 'body': '' + 'Noscript': + 'prefix': 'noscript' + 'body': '' + # O + 'Object': + 'prefix': 'object' + 'body': '\n\t$5\n' + 'Ordered List': + 'prefix': 'ol' + 'body': '
      \n\t$1\n
    ' + 'Option Group': + 'prefix': 'optgroup' + 'body': '\n\t$2\n' + 'Option': + 'prefix': 'option' + 'body': '$3$0' + 'Output': + 'prefix': 'output' + 'body': '$2$0' + # P + 'Paragraph': + 'prefix': 'p' + 'body': '

    $1

    $0' + 'Parameter': + 'prefix': 'param' + 'body': '$0' + 'Picture': + 'prefix': 'picture' + 'body': '\n\t$1\n' + 'Preformatted Text': + 'prefix': 'pre' + 'body': '
    $1
    $0' + 'Progress': + 'prefix': 'progress' + 'body': '${3:0%}$0' + # Q + 'Quote': + 'prefix': 'q' + 'body': '$3$0' + # R + 'Ruby Base': + 'prefix': 'rb' + 'body': '$1$0' + 'Ruby Parenthesis': + 'prefix': 'rp' + 'body': '$1$0' + 'Ruby Pronunciation': + 'prefix': 'rt' + 'body': '$1$0' + 'Ruby Text Container': + 'prefix': 'rtc' + 'body': '$1$0' + 'Ruby Annotation': + 'prefix': 'ruby' + 'body': '$1$0' + # S + 'Strikethrough': + 'prefix': 's' + 'body': '$1$0' + 'Sample Output': + 'prefix': 'samp' + 'body': '$1$0' + 'Script': + 'prefix': 'script' + 'body': '\n\t$3\n' + 'Script With External Source': + 'prefix': 'scriptsrc' + 'body': '$0' + 'Section': + 'prefix': 'section' + 'body': '
    \n\t$1\n
    ' + 'Select': + 'prefix': 'select' + 'body': '' + 'Small': + 'prefix': 'small' + 'body': '$1$0' + 'Source': + 'prefix': 'source' + 'body': '$0' + 'Span': + 'prefix': 'span' + 'body': '$1$0' + 'Strong': + 'prefix': 'strong' + 'body': '$1$0' + 'Style': + 'prefix': 'style' + 'body': '' + 'Subscript': + 'prefix': 'sub' + 'body': '$1$0' + 'Summary': + 'prefix': 'summary' + 'body': '$1$0' + 'Superscript': + 'prefix': 'sup' + 'body': '$1$0' + 'SVG': + 'prefix': 'svg' + 'body': '\n\t$1\n' + # T + 'Table': + 'prefix': 'table' + 'body': '\n\t$1\n
    ' + 'Table Body': + 'prefix': 'tbody' + 'body': '\n\t$1\n' + 'Table Cell': + 'prefix': 'td' + 'body': '$1$0' + 'Template': + 'prefix': 'template' + 'body': '' + 'Text Area': + 'prefix': 'textarea' + 'body': '$0' + 'Table Foot': + 'prefix': 'tfoot' + 'body': '\n\t$1\n' + 'Table Header Cell': + 'prefix': 'th' + 'body': '$1$0' + 'Table Head': + 'prefix': 'thead' + 'body': '\n\t$1\n' + 'Time': + 'prefix': 'time' + 'body': '$0' + 'Title': + 'prefix': 'title' + 'body': '${1:Home}$0' + 'Table Row': + 'prefix': 'tr' + 'body': '\n\t$1\n' + 'Track': + 'prefix': 'track' + 'body': '$0' + # U + 'Underline': + 'prefix': 'u' + 'body': '$1$0' + 'Unordered List': + 'prefix': 'ul' + 'body': '
      \n\t$1\n
    ' + # V + 'Variable': + 'prefix': 'var' + 'body': '$1$0' + 'Video': + 'prefix': 'video' + 'body': '' + # W + 'Word Break Opportunity': + 'prefix': 'wbr' + 'body': '' + +# These null out the snippets so the snippets are not available when in a tag or +# in embedded contexts like + + + + + + + + + diff --git a/packages/language-html/spec/fixtures/syntax_test_html_template_fragments.html b/packages/language-html/spec/fixtures/syntax_test_html_template_fragments.html new file mode 100644 index 000000000..b8b6f8596 --- /dev/null +++ b/packages/language-html/spec/fixtures/syntax_test_html_template_fragments.html @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + + + diff --git a/packages/language-html/spec/html-spec.coffee b/packages/language-html/spec/html-spec.coffee new file mode 100644 index 000000000..f346a202b --- /dev/null +++ b/packages/language-html/spec/html-spec.coffee @@ -0,0 +1,832 @@ +path = require 'path' +grammarTest = require 'atom-grammar-test' + +describe 'TextMate HTML grammar', -> + grammar = null + + beforeEach -> + atom.config.set('core.useTreeSitterParsers', false) + + waitsForPromise -> + atom.packages.activatePackage('language-html') + + runs -> + grammar = atom.grammars.grammarForScopeName('text.html.basic') + + it 'parses the grammar', -> + expect(grammar).toBeTruthy() + expect(grammar.scopeName).toBe 'text.html.basic' + + describe 'style tags', -> + beforeEach -> + waitsForPromise -> + atom.packages.activatePackage('language-css') + + it 'tokenizes the tag attributes', -> + lines = grammar.tokenizeLines ''' + + ''' + + expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html'] + expect(lines[0][1]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.style.html', 'entity.name.tag.style.html'] + expect(lines[0][3]).toEqual value: 'id', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'entity.other.attribute-name.id.html'] + expect(lines[0][4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'punctuation.separator.key-value.html'] + expect(lines[0][5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html'] + expect(lines[0][6]).toEqual value: 'id', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'meta.toc-list.id.html'] + expect(lines[0][7]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html'] + expect(lines[0][9]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html'] + expect(lines[0][10]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html'] + expect(lines[0][11]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html'] + expect(lines[0][12]).toEqual value: 'very-classy', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html'] + expect(lines[0][13]).toEqual value: '"', scopes: ['text.html.basic', 
'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html'] + expect(lines[0][14]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html'] + expect(lines[1][0]).toEqual value: '', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html'] + + it 'tokenizes multiline tag attributes', -> + lines = grammar.tokenizeLines ''' + + ''' + + expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html'] + expect(lines[0][1]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.style.html', 'entity.name.tag.style.html'] + expect(lines[0][3]).toEqual value: 'id', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'entity.other.attribute-name.id.html'] + expect(lines[0][4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'punctuation.separator.key-value.html'] + expect(lines[0][5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html'] + expect(lines[0][6]).toEqual value: 'id', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'meta.toc-list.id.html'] + expect(lines[0][7]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html'] + expect(lines[1][1]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html'] + expect(lines[1][5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.style.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html'] + expect(lines[2][0]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html'] + expect(lines[3][0]).toEqual value: '', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html'] + + it 'tokenizes the content inside the tag as CSS', -> + lines = grammar.tokenizeLines ''' + + ''' + + expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html'] + expect(lines[1][0]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.style.html', 'source.css.embedded.html'] + expect(lines[1][1]).toEqual value: 'span', scopes: ['text.html.basic', 'meta.tag.style.html', 'source.css.embedded.html', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[2][0]).toEqual value: ' + lines = grammar.tokenizeLines ''' + + ''' + + expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.style.html', 'punctuation.definition.tag.html'] + expect(lines[2][1]).toEqual value: 'span', scopes: ['text.html.basic', 'meta.tag.style.html', 'source.css.embedded.html', 'meta.selector.css', 'entity.name.tag.css'] + expect(lines[3][0]).toEqual value: ' + it 'tokenizes the tag attributes', -> + lines = grammar.tokenizeLines ''' + + ''' + + expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html'] + expect(lines[0][1]).toEqual value: 'script', scopes: ['text.html.basic', 
'meta.tag.script.html', 'entity.name.tag.script.html'] + expect(lines[0][3]).toEqual value: 'id', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'entity.other.attribute-name.id.html'] + expect(lines[0][4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'punctuation.separator.key-value.html'] + expect(lines[0][5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html'] + expect(lines[0][6]).toEqual value: 'id', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'meta.toc-list.id.html'] + expect(lines[0][7]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html'] + expect(lines[0][9]).toEqual value: 'type', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'entity.other.attribute-name.html'] + expect(lines[0][10]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'punctuation.separator.key-value.html'] + expect(lines[0][11]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html'] + expect(lines[0][12]).toEqual value: 'text/html', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html'] + expect(lines[0][13]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html'] + expect(lines[0][14]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html'] + expect(lines[1][0]).toEqual value: '', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html'] + + it 'tokenizes multiline tag attributes', -> + lines = grammar.tokenizeLines ''' + + ''' + + expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html'] + expect(lines[0][1]).toEqual value: 'script', scopes: ['text.html.basic', 'meta.tag.script.html', 'entity.name.tag.script.html'] + expect(lines[0][3]).toEqual value: 'id', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'entity.other.attribute-name.id.html'] + expect(lines[0][4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'punctuation.separator.key-value.html'] + expect(lines[0][5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html'] + expect(lines[0][6]).toEqual value: 'id', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'meta.toc-list.id.html'] + expect(lines[0][7]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.id.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html'] + expect(lines[0][9]).toEqual value: 'type', scopes: ['text.html.basic', 'meta.tag.script.html', 
'meta.attribute-with-value.html', 'entity.other.attribute-name.html'] + expect(lines[0][10]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'punctuation.separator.key-value.html'] + expect(lines[0][11]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html'] + expect(lines[0][12]).toEqual value: 'text/html', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html'] + expect(lines[0][13]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html'] + expect(lines[1][1]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html'] + expect(lines[1][5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.script.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html'] + expect(lines[2][0]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html'] + expect(lines[3][0]).toEqual value: '', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html'] + + describe 'template script tags', -> + it 'tokenizes the content inside the tag as HTML', -> + lines = grammar.tokenizeLines ''' + + ''' + + expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html'] + expect(lines[1][0]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.script.html', 'text.embedded.html'] + expect(lines[1][1]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'text.embedded.html', 'meta.tag.block.div.html', 'punctuation.definition.tag.begin.html'] + expect(lines[2][0]).toEqual value: ' + lines = grammar.tokenizeLines ''' + + ''' + + expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html'] + expect(lines[2][1]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'text.embedded.html', 'meta.tag.block.div.html', 'punctuation.definition.tag.begin.html'] + expect(lines[3][0]).toEqual value: ' + beforeEach -> + waitsForPromise -> + atom.packages.activatePackage('language-coffee-script') + + it 'tokenizes the content inside the tag as CoffeeScript', -> + lines = grammar.tokenizeLines ''' + + ''' + + expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html'] + expect(lines[1][0]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html'] + # TODO: Remove when Atom 1.21 reaches stable + if parseFloat(atom.getVersion()) <= 1.20 + expect(lines[1][1]).toEqual value: '->', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'storage.type.function.coffee'] + else + expect(lines[1][1]).toEqual value: '->', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'meta.function.inline.coffee', 'storage.type.function.coffee'] + expect(lines[2][0]).toEqual value: ' + lines = grammar.tokenizeLines ''' + + ''' + + expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 
'punctuation.definition.tag.html'] + # TODO: Remove when Atom 1.21 reaches stable + if parseFloat(atom.getVersion()) <= 1.20 + expect(lines[2][1]).toEqual value: '->', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'storage.type.function.coffee'] + else + expect(lines[2][1]).toEqual value: '->', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'meta.function.inline.coffee', 'storage.type.function.coffee'] + expect(lines[3][0]).toEqual value: ' + lines = grammar.tokenizeLines ''' + + ''' + + expect(lines[1][1]).toEqual value: '#', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'comment.line.number-sign.coffee', 'punctuation.definition.comment.coffee'] + expect(lines[1][2]).toEqual value: ' comment ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'comment.line.number-sign.coffee'] + expect(lines[1][3]).toEqual value: ' + ### + comment + ''' + + expect(lines[1][1]).toEqual value: '###', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'comment.block.coffee', 'punctuation.definition.comment.coffee'] + expect(lines[2][0]).toEqual value: ' comment ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.coffee.embedded.html', 'comment.block.coffee'] + expect(lines[2][1]).toEqual value: ' + beforeEach -> + waitsForPromise -> atom.packages.activatePackage('language-javascript') + + it 'tokenizes the content inside the tag as JavaScript', -> + lines = grammar.tokenizeLines ''' + + ''' + + expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html'] + expect(lines[1][0]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html'] + expect(lines[1][1]).toEqual value: 'var', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'storage.type.var.js'] + expect(lines[2][0]).toEqual value: ' + lines = grammar.tokenizeLines ''' + + ''' + + expect(lines[0][0]).toEqual value: '<', scopes: ['text.html.basic', 'meta.tag.script.html', 'punctuation.definition.tag.html'] + expect(lines[2][1]).toEqual value: 'var', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'storage.type.var.js'] + expect(lines[3][0]).toEqual value: ' + lines = grammar.tokenizeLines ''' + + ''' + + expect(lines[1][1]).toEqual value: '//', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'comment.line.double-slash.js', 'punctuation.definition.comment.js'] + expect(lines[1][2]).toEqual value: ' comment ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'comment.line.double-slash.js'] + expect(lines[1][3]).toEqual value: ' + /* + comment + ''' + + expect(lines[1][1]).toEqual value: '/*', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'comment.block.js', 'punctuation.definition.comment.begin.js'] + expect(lines[2][0]).toEqual value: ' comment ', scopes: ['text.html.basic', 'meta.tag.script.html', 'source.js.embedded.html', 'comment.block.js'] + expect(lines[2][1]).toEqual value: ' + it 'tokenizes -- as an error', -> + {tokens} = grammar.tokenizeLine '' + + expect(tokens[0]).toEqual value: '', scopes: ['text.html.basic', 'comment.block.html', 'punctuation.definition.comment.html'] + + {tokens} = grammar.tokenizeLine '' + + expect(tokens[0]).toEqual value: '', scopes: ['text.html.basic', 'comment.block.html', 
'punctuation.definition.comment.html'] + + grammarTest path.join(__dirname, 'fixtures/syntax_test_html.html') + grammarTest path.join(__dirname, 'fixtures/syntax_test_html_template_fragments.html') + + describe 'attributes', -> + it 'recognizes a single attribute with a quoted value', -> + {tokens} = grammar.tokenizeLine '' + + expect(tokens[3]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html'] + expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html'] + expect(tokens[5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html'] + expect(tokens[6]).toEqual value: 'foo', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html'] + expect(tokens[7]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html'] + expect(tokens[8]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + {tokens} = grammar.tokenizeLine "" + + expect(tokens[3]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html'] + expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html'] + expect(tokens[5]).toEqual value: "'", scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html', 'punctuation.definition.string.begin.html'] + expect(tokens[6]).toEqual value: 'foo', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html'] + expect(tokens[7]).toEqual value: "'", scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html', 'punctuation.definition.string.end.html'] + expect(tokens[8]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + it 'recognizes a single attribute with spaces around the equals sign', -> + {tokens} = grammar.tokenizeLine '' + + expect(tokens[3]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html'] + expect(tokens[4]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html'] + expect(tokens[5]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html'] + expect(tokens[6]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html'] + expect(tokens[7]).toEqual value: 'foo', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html'] + expect(tokens[8]).toEqual 
value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html'] + expect(tokens[9]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + {tokens} = grammar.tokenizeLine '' + + expect(tokens[3]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html'] + expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html'] + expect(tokens[5]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html'] + expect(tokens[6]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html'] + expect(tokens[7]).toEqual value: 'foo', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html'] + expect(tokens[8]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html'] + expect(tokens[9]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + {tokens} = grammar.tokenizeLine '' + + expect(tokens[3]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html'] + expect(tokens[4]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html'] + expect(tokens[5]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html'] + expect(tokens[6]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html'] + expect(tokens[7]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html'] + expect(tokens[8]).toEqual value: 'foo', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html'] + expect(tokens[9]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html'] + expect(tokens[10]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + it 'recognizes a single attribute with an unquoted value', -> + {tokens} = grammar.tokenizeLine '' + + expect(tokens[3]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html'] + expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html'] + expect(tokens[5]).toEqual value: 'foo-3+5@', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 
'meta.attribute-with-value.class.html', 'string.unquoted.html'] + expect(tokens[6]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + it 'recognizes a single attribute with no value', -> + {tokens} = grammar.tokenizeLine '' + + expect(tokens[3]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-without-value.html', 'entity.other.attribute-name.html'] + expect(tokens[4]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + it 'recognizes multiple attributes with varying values', -> + {tokens} = grammar.tokenizeLine "" + + expect(tokens[3]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html'] + expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html'] + expect(tokens[5]).toEqual value: "'", scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html', 'punctuation.definition.string.begin.html'] + expect(tokens[6]).toEqual value: 'btn', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html'] + expect(tokens[7]).toEqual value: "'", scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.single.html', 'punctuation.definition.string.end.html'] + expect(tokens[8]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html'] + expect(tokens[9]).toEqual value: 'disabled', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-without-value.html', 'entity.other.attribute-name.html'] + expect(tokens[10]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html'] + expect(tokens[11]).toEqual value: 'spellcheck', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'entity.other.attribute-name.html'] + expect(tokens[12]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'punctuation.separator.key-value.html'] + expect(tokens[13]).toEqual value: 'true', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'string.unquoted.html'] + expect(tokens[14]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + it 'recognizes attributes that are not on the same line as the tag name', -> + lines = grammar.tokenizeLines ''' + + ''' + + expect(lines[1][1]).toEqual value: 'class', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'entity.other.attribute-name.class.html'] + expect(lines[1][2]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'punctuation.separator.key-value.html'] + expect(lines[1][5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.class.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html'] + expect(lines[2][1]).toEqual value: 'disabled', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-without-value.html', 'entity.other.attribute-name.html'] + 
expect(lines[2][2]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + it 'tokenizes only one attribute value in a row', -> + # The following line is invalid per HTML specification, however some browsers parse the 'world' as attribute for compatibility reasons. + {tokens} = grammar.tokenizeLine '' + + expect(tokens[3]).toEqual value: 'attr', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'entity.other.attribute-name.html'] + expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'punctuation.separator.key-value.html'] + expect(tokens[5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html'] + expect(tokens[6]).toEqual value: 'hello', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'string.quoted.double.html'] + expect(tokens[7]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html'] + expect(tokens[8]).toEqual value: 'world', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-without-value.html', 'entity.other.attribute-name.html'] + expect(tokens[9]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + describe "the 'style' attribute", -> + beforeEach -> + waitsForPromise -> + atom.packages.activatePackage('language-css') + + quotes = + '"': 'double' + "'": 'single' + + for quote, type of quotes + it "tokenizes #{type}-quoted style attribute values as CSS property lists", -> + {tokens} = grammar.tokenizeLine "" + + expect(tokens[3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html'] + expect(tokens[5]).toEqual value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'punctuation.definition.string.begin.html'] + expect(tokens[6]).toEqual value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[9]).toEqual value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[10]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[11]).toEqual value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'punctuation.definition.string.end.html'] + expect(tokens[12]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + {tokens} = grammar.tokenizeLine "" + + expect(tokens[3]).toEqual 
value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html'] + expect(tokens[5]).toEqual value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'punctuation.definition.string.begin.html'] + expect(tokens[6]).toEqual value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[9]).toEqual value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[10]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[12]).toEqual value: 'z-index', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[15]).toEqual value: '10', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(tokens[16]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[17]).toEqual value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'punctuation.definition.string.end.html'] + expect(tokens[18]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + it "tokenizes #{type}-quoted multiline attributes", -> + lines = grammar.tokenizeLines """ + + """ + + expect(lines[0][3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html'] + expect(lines[0][5]).toEqual value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'punctuation.definition.string.begin.html'] + expect(lines[0][6]).toEqual value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[0][9]).toEqual value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(lines[0][10]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 
'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[1][0]).toEqual value: 'z-index', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[1][3]).toEqual value: '10', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[1][4]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[1][5]).toEqual value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'punctuation.definition.string.end.html'] + expect(lines[1][6]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + it 'tokenizes incomplete property lists', -> + {tokens} = grammar.tokenizeLine '' + + expect(tokens[3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html'] + expect(tokens[5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html'] + expect(tokens[6]).toEqual value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[9]).toEqual value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[10]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html'] + expect(tokens[11]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + lines = grammar.tokenizeLines """ + + """ + + expect(lines[0][3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html'] + expect(lines[0][5]).toEqual value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'punctuation.definition.string.begin.html'] + expect(lines[0][6]).toEqual value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[0][9]).toEqual value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 
'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(lines[0][10]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(lines[1][0]).toEqual value: 'z-index', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(lines[1][3]).toEqual value: '10', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(lines[1][4]).toEqual value: quote, scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', "string.quoted.#{type}.html", 'punctuation.definition.string.end.html'] + expect(lines[1][5]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + it 'ends invalid quoted property lists correctly', -> + {tokens} = grammar.tokenizeLine '' + + expect(tokens[3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html'] + expect(tokens[5]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'punctuation.definition.string.begin.html'] + expect(tokens[6]).toEqual value: 's', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css'] + expect(tokens[7]).toEqual value: ':', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'source.css.style.html', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[8]).toEqual value: '"', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.quoted.double.html', 'punctuation.definition.string.end.html'] + expect(tokens[9]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + it 'tokenizes unquoted property lists', -> + {tokens} = grammar.tokenizeLine '' + + expect(tokens[3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html'] + expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'punctuation.separator.key-value.html'] + expect(tokens[5]).toEqual value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[7]).toEqual value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 
'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[8]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[9]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + {tokens} = grammar.tokenizeLine '' + + expect(tokens[3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html'] + expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'punctuation.separator.key-value.html'] + expect(tokens[5]).toEqual value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[7]).toEqual value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'support.constant.property-value.css'] + expect(tokens[8]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'punctuation.terminator.rule.css'] + expect(tokens[9]).toEqual value: 'z-index', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[11]).toEqual value: '10', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-value.css', 'constant.numeric.css'] + expect(tokens[12]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + it 'ends invalid unquoted property lists correctly', -> + {tokens} = grammar.tokenizeLine '' + + expect(tokens[3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html'] + expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'punctuation.separator.key-value.html'] + expect(tokens[5]).toEqual value: 's', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css'] + expect(tokens[6]).toEqual value: ':', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[7]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + {tokens} = grammar.tokenizeLine '' + + 
expect(tokens[3]).toEqual value: 'style', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'entity.other.attribute-name.style.html'] + expect(tokens[4]).toEqual value: '=', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'punctuation.separator.key-value.html'] + expect(tokens[5]).toEqual value: 'display', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'meta.property-name.css', 'support.type.property-name.css'] + expect(tokens[6]).toEqual value: ':', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-with-value.style.html', 'string.unquoted.html', 'source.css.style.html', 'meta.property-list.css', 'punctuation.separator.key-value.css'] + expect(tokens[7]).toEqual value: ' ', scopes: ['text.html.basic', 'meta.tag.inline.span.html'] + expect(tokens[8]).toEqual value: 'none', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'meta.attribute-without-value.html', 'entity.other.attribute-name.html'] + expect(tokens[9]).toEqual value: '>', scopes: ['text.html.basic', 'meta.tag.inline.span.html', 'punctuation.definition.tag.end.html'] + + describe 'character references', -> + it 'tokenizes & and characters after it', -> + # NOTE: &a should NOT be tokenized as a character reference as there is no semicolon following it + # We have no way of knowing if there will ever be a semicolon so we play conservatively. + {tokens} = grammar.tokenizeLine '& & &a' + + expect(tokens[0]).toEqual value: '& ', scopes: ['text.html.basic'] + expect(tokens[1]).toEqual value: '&', scopes: ['text.html.basic', 'constant.character.entity.html', 'punctuation.definition.entity.begin.html'] + expect(tokens[2]).toEqual value: 'amp', scopes: ['text.html.basic', 'constant.character.entity.html', 'entity.name.entity.other.html'] + expect(tokens[3]).toEqual value: ';', scopes: ['text.html.basic', 'constant.character.entity.html', 'punctuation.definition.entity.end.html'] + expect(tokens[4]).toEqual value: ' &a', scopes: ['text.html.basic'] + + lines = grammar.tokenizeLines '&\n' + expect(lines[0][0]).toEqual value: '&', scopes: ['text.html.basic'] + + it 'tokenizes hexadecimal and digit character references', -> + {tokens} = grammar.tokenizeLine '" " "' + + expect(tokens[0]).toEqual value: '&', scopes: ['text.html.basic', 'constant.character.entity.html', 'punctuation.definition.entity.begin.html'] + expect(tokens[1]).toEqual value: '#x00022', scopes: ['text.html.basic', 'constant.character.entity.html', 'entity.name.entity.other.html'] + expect(tokens[2]).toEqual value: ';', scopes: ['text.html.basic', 'constant.character.entity.html', 'punctuation.definition.entity.end.html'] + expect(tokens[4]).toEqual value: '&', scopes: ['text.html.basic', 'constant.character.entity.html', 'punctuation.definition.entity.begin.html'] + expect(tokens[5]).toEqual value: '#X00022', scopes: ['text.html.basic', 'constant.character.entity.html', 'entity.name.entity.other.html'] + expect(tokens[6]).toEqual value: ';', scopes: ['text.html.basic', 'constant.character.entity.html', 'punctuation.definition.entity.end.html'] + expect(tokens[8]).toEqual value: '&', scopes: ['text.html.basic', 'constant.character.entity.html', 'punctuation.definition.entity.begin.html'] + expect(tokens[9]).toEqual value: '#34', scopes: ['text.html.basic', 'constant.character.entity.html', 'entity.name.entity.other.html'] + 
+      expect(tokens[10]).toEqual value: ';', scopes: ['text.html.basic', 'constant.character.entity.html', 'punctuation.definition.entity.end.html']
+
+    it 'tokenizes invalid ampersands', ->
+      {tokens} = grammar.tokenizeLine 'PSE&>'
+      expect(tokens[0]).toEqual value: 'PSE', scopes: ['text.html.basic']
+      expect(tokens[1]).toEqual value: '&', scopes: ['text.html.basic', 'invalid.illegal.bad-ampersand.html']
+      expect(tokens[2]).toEqual value: '>', scopes: ['text.html.basic']
+
+      {tokens} = grammar.tokenizeLine 'PSE&'
+      expect(tokens[0]).toEqual value: 'PSE&', scopes: ['text.html.basic']
+
+      {tokens} = grammar.tokenizeLine '&<'
+      expect(tokens[0]).toEqual value: '&<', scopes: ['text.html.basic']
+
+      {tokens} = grammar.tokenizeLine '& '
+      expect(tokens[0]).toEqual value: '& ', scopes: ['text.html.basic']
+
+      {tokens} = grammar.tokenizeLine '&'
+      expect(tokens[0]).toEqual value: '&', scopes: ['text.html.basic']
+
+      {tokens} = grammar.tokenizeLine '&&'
+      expect(tokens[0]).toEqual value: '&&', scopes: ['text.html.basic']
+
+    it 'tokenizes character references in attributes', ->
+      {tokens} = grammar.tokenizeLine ''
+      expect(tokens[7]).toEqual value: '&', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'constant.character.entity.html', 'punctuation.definition.entity.begin.html']
+      expect(tokens[8]).toEqual value: 'amp', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'constant.character.entity.html', 'entity.name.entity.other.html']
+      expect(tokens[9]).toEqual value: ';', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'constant.character.entity.html', 'punctuation.definition.entity.end.html']
+
+    it 'does not tokenize query parameters as character references', ->
+      {tokens} = grammar.tokenizeLine ''
+      expect(tokens[6]).toEqual value: 'http://example.com?one=1&type=json&topic=css', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html']
+
+    it 'does not tokenize multiple ampersands followed by alphabetical characters as character references', ->
+      {tokens} = grammar.tokenizeLine ''
+      expect(tokens[6]).toEqual value: 'http://example.com?price&something&yummy:&wow', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html']
+
+    it 'tokenizes invalid ampersands in attributes', ->
+      # Note: in order to replicate the following tests' behaviors, make sure you have language-hyperlink disabled
+      {tokens} = grammar.tokenizeLine ''
+      expect(tokens[7]).toEqual value: '&', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'invalid.illegal.bad-ampersand.html']
+
+      {tokens} = grammar.tokenizeLine ''
+      expect(tokens[7]).toEqual value: '&', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'invalid.illegal.bad-ampersand.html']
+
+      {tokens} = grammar.tokenizeLine ''
+      expect(tokens[6]).toEqual value: 'http://example.com?& ', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html']
+
+      lines = grammar.tokenizeLines ''
+      expect(lines[0][6]).toEqual value: 'http://example.com?&', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html']
+
+      {tokens} =
grammar.tokenizeLine '' + expect(tokens[6]).toEqual value: 'http://example.com?&', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html'] + expect(tokens[7]).toEqual value: '&', scopes: ['text.html.basic', 'meta.tag.inline.a.html', 'meta.attribute-with-value.html', 'string.quoted.double.html', 'invalid.illegal.bad-ampersand.html'] + + describe 'firstLineMatch', -> + it 'recognises HTML5 doctypes', -> + expect(grammar.firstLineRegex.scanner.findNextMatchSync('')).not.toBeNull() + expect(grammar.firstLineRegex.scanner.findNextMatchSync('')).not.toBeNull() + + it 'recognises Emacs modelines', -> + valid = ''' + #-*- HTML -*- + #-*- mode: HTML -*- + /* -*-html-*- */ + // -*- HTML -*- + /* -*- mode:HTML -*- */ + // -*- font:bar;mode:HTML -*- + // -*- font:bar;mode:HTML;foo:bar; -*- + // -*-font:mode;mode:HTML-*- + // -*- foo:bar mode: html bar:baz -*- + " -*-foo:bar;mode:html;bar:foo-*- "; + " -*-font-mode:foo;mode:html;foo-bar:quux-*-" + "-*-font:x;foo:bar; mode : HTML; bar:foo;foooooo:baaaaar;fo:ba;-*-"; + "-*- font:x;foo : bar ; mode : HtML ; bar : foo ; foooooo:baaaaar;fo:ba-*-"; + ''' + for line in valid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull() + + invalid = ''' + /* --*html-*- */ + /* -*-- HTML -*- + /* -*- -- HTML -*- + /* -*- HTM -;- -*- + // -*- xHTML -*- + // -*- HTML; -*- + // -*- html-stuff -*- + /* -*- model:html -*- + /* -*- indent-mode:html -*- + // -*- font:mode;html -*- + // -*- HTimL -*- + // -*- mode: -*- HTML + // -*- mode: -html -*- + // -*-font:mode;mode:html--*- + ''' + for line in invalid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull() + + it 'recognises Vim modelines', -> + valid = ''' + vim: se filetype=html: + # vim: se ft=html: + # vim: set ft=HTML: + # vim: set filetype=XHTML: + # vim: ft=XHTML + # vim: syntax=HTML + # vim: se syntax=xhtml: + # ex: syntax=HTML + # vim:ft=html + # vim600: ft=xhtml + # vim>600: set ft=html: + # vi:noai:sw=3 ts=6 ft=html + # vi::::::::::noai:::::::::::: ft=html + # vim:ts=4:sts=4:sw=4:noexpandtab:ft=html + # vi:: noai : : : : sw =3 ts =6 ft =html + # vim: ts=4: pi sts=4: ft=html: noexpandtab: sw=4: + # vim: ts=4 sts=4: ft=html noexpandtab: + # vim:noexpandtab sts=4 ft=html ts=4 + # vim:noexpandtab:ft=html + # vim:ts=4:sts=4 ft=html:noexpandtab:\x20 + # vim:noexpandtab titlestring=hi\|there\\\\ ft=html ts=4 + ''' + for line in valid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull() + + invalid = ''' + ex: se filetype=html: + _vi: se filetype=HTML: + vi: se filetype=HTML + # vim set ft=html5 + # vim: soft=html + # vim: clean-syntax=html: + # vim set ft=html: + # vim: setft=HTML: + # vim: se ft=html backupdir=tmp + # vim: set ft=HTML set cmdheight=1 + # vim:noexpandtab sts:4 ft:HTML ts:4 + # vim:noexpandtab titlestring=hi\\|there\\ ft=HTML ts=4 + # vim:noexpandtab titlestring=hi\\|there\\\\\\ ft=HTML ts=4 + ''' + for line in invalid.split /\n/ + expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull() + + describe 'tags', -> + it 'tokenizes style tags as such', -> + {tokens} = grammar.tokenizeLine '