➡️ Migrate all language packages

To make https://github.com/atom-community/atom/pull/386 reviewable,
that PR will be separated into many simpler PRs. This is one of them.

This first commit does the following:
- update package.json
- update package-lock.json using `./script/build` which also seems to update `apm/package-lock.json`
- update packages/README.md
- clone all language packages. Specifically:
    - `mkdir packages/language-<all of them>`
    - `cd packages/about`
    - For all languages:
        - `cd ../language-<>`
        - `git clone language-<>`
        - Move all files except `.git` from `language-<>/language-<>`
          to `language-<>`
        - delete `language-<>/language-<>`

At first I accidentally updated `dependencies` before updating
`packageDependencies`. Because the versions of language-c,
language-css, language-go, language-javascript, and language-sass don't
match, it appears `dependencies` was reset for those packages.

[Those repos just happen to be precisely the ones that have tree-sitter v19](https://github.com/icecream17/atom-update-backlog/blob/main/Languages.md), (sans language-sass),
which [currently breaks Atom](https://github.com/atom/atom/issues/22129). So even though their repos are now
in `packages`, **I've decided not to use them**.

This is done by updating `packageDependencies` only for non-breaking
languages.
This commit is contained in:
steven nguyen 2022-06-25 19:58:57 +00:00
parent 7eaebe0eea
commit 477b075db5
570 changed files with 107088 additions and 526 deletions

570
apm/package-lock.json generated

File diff suppressed because it is too large Load Diff

152
package-lock.json generated
View File

@ -1,6 +1,6 @@
{
"name": "atom",
"version": "1.61.0-dev",
"version": "1.63.0-dev",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@ -1615,7 +1615,7 @@
"atom-grammar-test": {
"version": "0.6.4",
"resolved": "https://registry.npmjs.org/atom-grammar-test/-/atom-grammar-test-0.6.4.tgz",
"integrity": "sha1-2KU1A9H+k5mX9Ji3SirDEARKfU4=",
"integrity": "sha512-oVd4NmzM95nnb/CSPuyO/YlwbhRN7rpv3UTnc5btj9RSJaI7r6GzJoFYpIjOwBviGnWHuMoZxyupEn2VXbGbZw==",
"requires": {
"chevrotain": "^0.18.0",
"escape-string-regexp": "^1.0.5"
@ -2271,7 +2271,7 @@
"chevrotain": {
"version": "0.18.0",
"resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-0.18.0.tgz",
"integrity": "sha1-sodxTjFZC64sXR4vYRZz7+xHnYA="
"integrity": "sha512-TdMrwmrzGuLta1iwC7yuDC7T3IQrI2WHjzbkY3naTX6RsQivy9aXBrTc+LuUbrs4mdefrwVHBVLW6ojn5giTZw=="
},
"chownr": {
"version": "1.1.3",
@ -4754,16 +4754,13 @@
}
},
"language-clojure": {
"version": "https://www.atom.io/api/packages/language-clojure/versions/0.22.8/tarball",
"integrity": "sha512-V9tDXCuZf53Esy3W1eUuaZW2Dq78n3KdPWkypfz3pJJ1bklgLgCWxBUGjLAY4X/ULgYjucnelhp71xwLjZZa5A=="
"version": "file:packages/language-clojure"
},
"language-coffee-script": {
"version": "https://www.atom.io/api/packages/language-coffee-script/versions/0.50.0/tarball",
"integrity": "sha512-Sp3b1i8wsd+AELphP2f52mli4C3YjicGC8ps21g48V3SrTZoM7tLE7lkcEdKddYlTqo0fBixTKN2R/iL6GcEVw=="
"version": "file:packages/language-coffee-script"
},
"language-csharp": {
"version": "https://www.atom.io/api/packages/language-csharp/versions/1.1.0/tarball",
"integrity": "sha512-37RDzLiqSHJjopFg3T/Sqntx793OMQ4o7tkf3IuKe68Mshz3mwavJhIaIX/mca+0u/aWquySJFzlZ4D8EdtRSA=="
"version": "file:packages/language-csharp"
},
"language-css": {
"version": "https://www.atom.io/api/packages/language-css/versions/0.45.1/tarball",
@ -4773,12 +4770,10 @@
}
},
"language-gfm": {
"version": "https://www.atom.io/api/packages/language-gfm/versions/0.90.8/tarball",
"integrity": "sha512-YQ13ypnfPvQTcZ/8j6cUuLsYBoU88qqPlFTRXNXa72L1HVaahFDgG0d0a/QOdOnxrYBtmEWR/5Q3FNPwPpSehw=="
"version": "file:packages/language-gfm"
},
"language-git": {
"version": "https://www.atom.io/api/packages/language-git/versions/0.19.1/tarball",
"integrity": "sha512-xvsGO/d3/XsKJmwdAz9VGHo6t7A13VuJeuEoZaoLmvzwkVpFdpJcK8PNwVMPHav+lpNeu73qiXmqS+YIlvLwLQ=="
"version": "file:packages/language-git"
},
"language-go": {
"version": "https://www.atom.io/api/packages/language-go/versions/0.47.2/tarball",
@ -4788,8 +4783,7 @@
}
},
"language-html": {
"version": "https://www.atom.io/api/packages/language-html/versions/0.53.1/tarball",
"integrity": "sha512-/GFk8qHnQ67E/+RZs1my117VKPIAsfUNrDg+7EU+HlCx8qnEnV7lBRaWedh0AoDDGtaMm2wmuhTM/1eGNcDJ8Q==",
"version": "file:packages/language-html",
"requires": {
"atom-grammar-test": "^0.6.3",
"tree-sitter-embedded-template": "^0.15.2",
@ -4797,12 +4791,10 @@
}
},
"language-hyperlink": {
"version": "https://www.atom.io/api/packages/language-hyperlink/versions/0.17.1/tarball",
"integrity": "sha512-bntgT5AVqSbWZpjjiGbKVfzjocWHgDLbfAnECKkk87owjlMeuzbZaylI+HRdbVxPMt9K1UdFRVT/NUaia+A3+g=="
"version": "file:packages/language-hyperlink"
},
"language-java": {
"version": "https://www.atom.io/api/packages/language-java/versions/0.32.1/tarball",
"integrity": "sha512-CzS8Tr2uL93SElx/P6eZCDbxnGdBq9EBimFezXWWop+IgmYPNaNFS3d2kFUXgSNY3bvNV9ezpR7xSIZteFpisQ==",
"version": "file:packages/language-java",
"requires": {
"tree-sitter-java-dev": "^0.16.0-dev2"
}
@ -4817,58 +4809,47 @@
}
},
"language-json": {
"version": "https://www.atom.io/api/packages/language-json/versions/1.0.5/tarball",
"integrity": "sha512-n4kpZ0Z3Yju2qnqoGvYXgQJF2HdR21qlrLrZ66CmsAPI7Ttw0xgXbVHBNHaHIWlH3lQT30p472cNsYlQl3pdNA==",
"version": "file:packages/language-json",
"requires": {
"tree-sitter-json": "^0.15.1"
}
},
"language-less": {
"version": "https://www.atom.io/api/packages/language-less/versions/0.34.3/tarball",
"integrity": "sha512-x1sDaJKCIQuLufevH9dt9XET3zfKaXudF1RMq05D9OpQBnhi34qRlG/jgI1khykOUn/NuhSsb5ZJtixj0oy+bA=="
"version": "file:packages/language-less"
},
"language-make": {
"version": "https://www.atom.io/api/packages/language-make/versions/0.23.0/tarball",
"integrity": "sha512-kNY6n/0eTu6398rIQHwaXC1+Rsq9a3TZrMd+KVNPoJJh33GnMocjPxEempZ6jAOL5fa+hxb8ESiUOcQlEm9hyA=="
"version": "file:packages/language-make"
},
"language-mustache": {
"version": "https://www.atom.io/api/packages/language-mustache/versions/0.14.5/tarball",
"integrity": "sha512-1aC1OAoYye+krEJ8t5RzXiLYTEA/RJ/Igv1efDsuxvZHnIkdrSDzS/UsssS3snqPkIGyLI+htRvU/v11famx6A=="
"version": "file:packages/language-mustache"
},
"language-objective-c": {
"version": "https://www.atom.io/api/packages/language-objective-c/versions/0.16.0/tarball",
"integrity": "sha512-KFkmXxNuTL2zwL8mfIF9PovRaWUOu/rWPp/fDjSgXPgClXUWeJdZQystXODr6u7kvGYEAdmjYFj/zQu7f/P85Q=="
"version": "file:packages/language-objective-c"
},
"language-perl": {
"version": "https://www.atom.io/api/packages/language-perl/versions/0.38.1/tarball",
"integrity": "sha512-XXHULyFvbxAiRoj+MxIXoeO//in3bQctHZbaD72p3vFxm3klxe2ebx7b3cFmFYqf/g0eajmLrR3tR5m1Rmz1XQ=="
"version": "file:packages/language-perl"
},
"language-php": {
"version": "https://www.atom.io/api/packages/language-php/versions/0.48.1/tarball",
"integrity": "sha512-E943QBnu4Z9PVEHdXVeY/XSWVopYnoB1Pr43RJHX3r9Xnd/slFNamBzVTIHazMqGM/33PSjaNmpeQFEfgRtHwQ=="
"version": "file:packages/language-php"
},
"language-property-list": {
"version": "https://www.atom.io/api/packages/language-property-list/versions/0.9.1/tarball",
"integrity": "sha512-HD6HI41u57i0/Tu9catiriURhJsef0RDrzJDkGDtdFkE9F9KPxC9Fayq2JBLJrhIyADRVXFxwxsfwQ2Jmh6hxg=="
"version": "file:packages/language-property-list"
},
"language-python": {
"version": "https://www.atom.io/api/packages/language-python/versions/0.53.6/tarball",
"integrity": "sha512-QLAajhoCNaDvWPE8qw/v0T0yMQCMavu5P0ZkJXTOuVzG3hj4W60F87PFYTgwSHa61KpXGvUA1kiGibeQbxytGA==",
"version": "file:packages/language-python",
"requires": {
"atom-grammar-test": "^0.6.4",
"tree-sitter-python": "^0.17.0"
}
},
"language-ruby": {
"version": "https://www.atom.io/api/packages/language-ruby/versions/0.73.0/tarball",
"integrity": "sha512-dbqBGWUBHyzXStRiZNWR/Dx85Co3ecQvF9IWjngAcWdFsye1zrUWAdhSLOU8FvYQnP2jBgE2EmQQO+jSCG+T4Q==",
"version": "file:packages/language-ruby",
"requires": {
"tree-sitter-ruby": "^0.17.0"
}
},
"language-ruby-on-rails": {
"version": "https://www.atom.io/api/packages/language-ruby-on-rails/versions/0.25.3/tarball",
"integrity": "sha512-uI4ItSsq1J0/5gBblVgLv69C8TzWMcAoL19H8iFuosWWDRUsh9va1PrPMLeSNnNbnOYkw2fE53fqLlJjrgxiGw=="
"version": "file:packages/language-ruby-on-rails"
},
"language-rust-bundled": {
"version": "file:packages/language-rust-bundled",
@ -4887,50 +4868,40 @@
}
},
"language-sass": {
"version": "https://www.atom.io/api/packages/language-sass/versions/0.62.1/tarball",
"integrity": "sha512-6UIvd6scZY06JE2X9INQzLHu3KOHnPOU16teD2MhsY3yU8OGExEtZRkY93G4OwUQN9GB2keeF70X1O7LX6FZSg=="
"version": "file:packages/language-sass"
},
"language-shellscript": {
"version": "https://www.atom.io/api/packages/language-shellscript/versions/0.28.2/tarball",
"integrity": "sha512-YAbcijqWa07DSn6HXlV5KSJ/8nMBpT+DteEwOK2A4vXSSFc0phUMR+LcPcjVB5599OZkX4aB42DqjKHUT9LMtQ==",
"version": "file:packages/language-shellscript",
"requires": {
"tree-sitter-bash": "^0.16.1"
}
},
"language-source": {
"version": "https://www.atom.io/api/packages/language-source/versions/0.9.0/tarball",
"integrity": "sha512-Uu/C5EQKdKgwUOiCWM95CkCUePhT93KpiqsrVqEgTV1TssLY/LRwT9fd1XJSZ5EDKSS71Tfzvbww/V117uoDWw=="
"version": "file:packages/language-source"
},
"language-sql": {
"version": "https://www.atom.io/api/packages/language-sql/versions/0.25.10/tarball",
"integrity": "sha512-JXlwc9wV0qnhLn2fe3xRSNghxy/MtmCgy5+6xXN3Dqr9f6Q9Jh4vy3Kwrhz4xSgpPcHMocQwS72JcFuTI9CRdw=="
"version": "file:packages/language-sql"
},
"language-text": {
"version": "https://www.atom.io/api/packages/language-text/versions/0.7.4/tarball",
"integrity": "sha512-XPmROjdb8CvAznbyiDYNeJi0hKZegBA84bAyTSt/FbZR0enexxk+5NDlyjqYsmR7A1P+LtcMJJZdQYPgXr7mdw=="
"version": "file:packages/language-text"
},
"language-todo": {
"version": "https://www.atom.io/api/packages/language-todo/versions/0.29.4/tarball",
"integrity": "sha512-mdSeM6hR7D9ZohrfMTA9wDH46MQbcbfTMxU5WpzYwvQXAvYEZyuhc2dzWZ827VsSOrUcOcAYVcOvTkTrx9nytg=="
"version": "file:packages/language-todo"
},
"language-toml": {
"version": "https://www.atom.io/api/packages/language-toml/versions/0.20.0/tarball",
"integrity": "sha512-6xFDqM6nZpynmxGKUS85iUWY0yeub7GYvLyzSOqDejMuOL5UXAITnSNcb7jhr+hQA8KTj5dCmRjphkAQER4Ucg=="
"version": "file:packages/language-toml"
},
"language-typescript": {
"version": "https://www.atom.io/api/packages/language-typescript/versions/0.6.3/tarball",
"integrity": "sha512-F/ZnFXEF7C14/8JQ3T1kiCKVff+AB043LE5i0k3m86YsVl6IrjK6ElBNu5TsmUd7Se3STmqPfjn0Pf3280AZmg==",
"version": "file:packages/language-typescript",
"requires": {
"tree-sitter-typescript": "^0.16.1"
}
},
"language-xml": {
"version": "https://www.atom.io/api/packages/language-xml/versions/0.35.3/tarball",
"integrity": "sha512-9fh1pwCSikEdHoOGprBr7xeO2lq8GuOwSRsN3dwJKGTvzFaji2Zh6KkgxHBEOh2spsc8ORT+THZ+h6hhHz+ckQ=="
"version": "file:packages/language-xml"
},
"language-yaml": {
"version": "https://www.atom.io/api/packages/language-yaml/versions/0.32.0/tarball",
"integrity": "sha512-kx6Qj//j3PuFaf8yhlfPGdirRJ3NVvLJw+9Oi2Gg998K6vG/XecgvwyP5jVs4xExX8eYMOTlvN7n6dgkPf6LHQ=="
"version": "file:packages/language-yaml"
},
"lazy-cache": {
"version": "1.0.4",
@ -7997,9 +7968,9 @@
},
"dependencies": {
"bl": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/bl/-/bl-4.0.3.tgz",
"integrity": "sha512-fs4G6/Hu4/EE+F75J8DuN/0IpQqNjAdC7aEQv7Qt8MHGUH7Ckv2MwTEEeN9QehD0pfIDkMI1bkHYkKy7xHyKIg==",
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz",
"integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==",
"requires": {
"buffer": "^5.5.0",
"inherits": "^2.0.4",
@ -8020,9 +7991,9 @@
"integrity": "sha512-wXqjST+SLt7R009ySCglWBCFpjUygmCIfD790/kVbiGmUgfYGuB14PiTd5DwVxSV4NcYHjzMkoj5LjQZwTQLEA=="
},
"minimist": {
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
"integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw=="
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
"integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q=="
},
"prebuild-install": {
"version": "5.3.6",
@ -8046,15 +8017,6 @@
"which-pm-runs": "^1.0.0"
}
},
"pump": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
"integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==",
"requires": {
"end-of-stream": "^1.1.0",
"once": "^1.3.1"
}
},
"readable-stream": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
@ -8066,9 +8028,9 @@
}
},
"simple-get": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/simple-get/-/simple-get-3.1.0.tgz",
"integrity": "sha512-bCR6cP+aTdScaQCnQKbPKtJOKDp/hj9EDLJo3Nw4y1QksqaovlW/bnptB6/c1e+qmNIDHRK+oXFDdEqBT8WzUA==",
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/simple-get/-/simple-get-3.1.1.tgz",
"integrity": "sha512-CQ5LTKGfCpvE1K0n2us+kuMPbk/q0EKl82s4aheV9oXjFEz6W/Y7oQFVJuU6QG77hRT4Ghb5RURteF5vnWjupA==",
"requires": {
"decompress-response": "^4.2.0",
"once": "^1.3.1",
@ -8076,20 +8038,20 @@
}
},
"tar-fs": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.0.tgz",
"integrity": "sha512-9uW5iDvrIMCVpvasdFHW0wJPez0K4JnMZtsuIeDI7HyMGJNxmDZDOCQROr7lXyS+iL/QMpj07qcjGYTSdRFXUg==",
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz",
"integrity": "sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==",
"requires": {
"chownr": "^1.1.1",
"mkdirp-classic": "^0.5.2",
"pump": "^3.0.0",
"tar-stream": "^2.0.0"
"tar-stream": "^2.1.4"
}
},
"tar-stream": {
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.1.4.tgz",
"integrity": "sha512-o3pS2zlG4gxr67GmFYBLlq+dM8gyRGUOvsrHclSkvtVtQbjV0s/+ZE8OpICbaj8clrX3tjeHngYGP7rweaBnuw==",
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz",
"integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==",
"requires": {
"bl": "^4.0.3",
"end-of-stream": "^1.4.1",
@ -8125,9 +8087,9 @@
},
"dependencies": {
"nan": {
"version": "2.15.0",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.15.0.tgz",
"integrity": "sha512-8ZtvEnA2c5aYCZYd1cvgdnU6cqwixRoYg70xPLWUws5ORTa/lnw+u4amixRS/Ac5U5mQVgp9pnlSUnbNWFaWZQ=="
"version": "2.16.0",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.16.0.tgz",
"integrity": "sha512-UdAqHyFngu7TfQKsCBgAA6pWDkT8MAO7d0jyOecVhN5354xbLqdn8mV9Tat9gepAupm0bt2DbeaSC8vS52MuFA=="
}
}
},
@ -8236,9 +8198,9 @@
"integrity": "sha512-wXqjST+SLt7R009ySCglWBCFpjUygmCIfD790/kVbiGmUgfYGuB14PiTd5DwVxSV4NcYHjzMkoj5LjQZwTQLEA=="
},
"minimist": {
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
"integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw=="
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
"integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q=="
},
"prebuild-install": {
"version": "5.3.6",
@ -8308,9 +8270,9 @@
}
},
"tree-sitter-typescript": {
"version": "0.16.1",
"resolved": "https://registry.npmjs.org/tree-sitter-typescript/-/tree-sitter-typescript-0.16.1.tgz",
"integrity": "sha512-jyU5yl4W6JPn66v2YbzaO1ClDcdDnj+7YQNZz3STgEiUooSjpWI1Ucgw+S/qEGbf0fMXsC0fucpP+/M1uc9ubw==",
"version": "0.16.3",
"resolved": "https://registry.npmjs.org/tree-sitter-typescript/-/tree-sitter-typescript-0.16.3.tgz",
"integrity": "sha512-qdRydjlnFuxwlkE/+oqOywzcKL2l3G1xkhR9DxDySGfF4JiMdYCTqJCWRUYaGnagJDZBF7wGWtHf5FGGXdLjNw==",
"requires": {
"nan": "^2.14.0"
}

View File

@ -82,39 +82,39 @@
"key-path-helpers": "^0.4.0",
"keybinding-resolver": "https://www.atom.io/api/packages/keybinding-resolver/versions/0.39.1/tarball",
"language-c": "https://www.atom.io/api/packages/language-c/versions/0.60.19/tarball",
"language-clojure": "https://www.atom.io/api/packages/language-clojure/versions/0.22.8/tarball",
"language-coffee-script": "https://www.atom.io/api/packages/language-coffee-script/versions/0.50.0/tarball",
"language-csharp": "https://www.atom.io/api/packages/language-csharp/versions/1.1.0/tarball",
"language-clojure": "file:packages/language-clojure",
"language-coffee-script": "file:packages/language-coffee-script",
"language-csharp": "file:packages/language-csharp",
"language-css": "https://www.atom.io/api/packages/language-css/versions/0.45.1/tarball",
"language-gfm": "https://www.atom.io/api/packages/language-gfm/versions/0.90.8/tarball",
"language-git": "https://www.atom.io/api/packages/language-git/versions/0.19.1/tarball",
"language-gfm": "file:packages/language-gfm",
"language-git": "file:packages/language-git",
"language-go": "https://www.atom.io/api/packages/language-go/versions/0.47.2/tarball",
"language-html": "https://www.atom.io/api/packages/language-html/versions/0.53.1/tarball",
"language-hyperlink": "https://www.atom.io/api/packages/language-hyperlink/versions/0.17.1/tarball",
"language-java": "https://www.atom.io/api/packages/language-java/versions/0.32.1/tarball",
"language-html": "file:packages/language-html",
"language-hyperlink": "file:packages/language-hyperlink",
"language-java": "file:packages/language-java",
"language-javascript": "https://www.atom.io/api/packages/language-javascript/versions/0.134.1/tarball",
"language-json": "https://www.atom.io/api/packages/language-json/versions/1.0.5/tarball",
"language-less": "https://www.atom.io/api/packages/language-less/versions/0.34.3/tarball",
"language-make": "https://www.atom.io/api/packages/language-make/versions/0.23.0/tarball",
"language-mustache": "https://www.atom.io/api/packages/language-mustache/versions/0.14.5/tarball",
"language-objective-c": "https://www.atom.io/api/packages/language-objective-c/versions/0.16.0/tarball",
"language-perl": "https://www.atom.io/api/packages/language-perl/versions/0.38.1/tarball",
"language-php": "https://www.atom.io/api/packages/language-php/versions/0.48.1/tarball",
"language-property-list": "https://www.atom.io/api/packages/language-property-list/versions/0.9.1/tarball",
"language-python": "https://www.atom.io/api/packages/language-python/versions/0.53.6/tarball",
"language-ruby": "https://www.atom.io/api/packages/language-ruby/versions/0.73.0/tarball",
"language-ruby-on-rails": "https://www.atom.io/api/packages/language-ruby-on-rails/versions/0.25.3/tarball",
"language-json": "file:packages/language-json",
"language-less": "file:packages/language-less",
"language-make": "file:packages/language-make",
"language-mustache": "file:packages/language-mustache",
"language-objective-c": "file:packages/language-objective-c",
"language-perl": "file:packages/language-perl",
"language-php": "file:packages/language-php",
"language-property-list": "file:packages/language-property-list",
"language-python": "file:packages/language-python",
"language-ruby": "file:packages/language-ruby",
"language-ruby-on-rails": "file:packages/language-ruby-on-rails",
"language-rust-bundled": "file:packages/language-rust-bundled",
"language-sass": "https://www.atom.io/api/packages/language-sass/versions/0.62.1/tarball",
"language-shellscript": "https://www.atom.io/api/packages/language-shellscript/versions/0.28.2/tarball",
"language-source": "https://www.atom.io/api/packages/language-source/versions/0.9.0/tarball",
"language-sql": "https://www.atom.io/api/packages/language-sql/versions/0.25.10/tarball",
"language-text": "https://www.atom.io/api/packages/language-text/versions/0.7.4/tarball",
"language-todo": "https://www.atom.io/api/packages/language-todo/versions/0.29.4/tarball",
"language-toml": "https://www.atom.io/api/packages/language-toml/versions/0.20.0/tarball",
"language-typescript": "https://www.atom.io/api/packages/language-typescript/versions/0.6.3/tarball",
"language-xml": "https://www.atom.io/api/packages/language-xml/versions/0.35.3/tarball",
"language-yaml": "https://www.atom.io/api/packages/language-yaml/versions/0.32.0/tarball",
"language-sass": "file:packages/language-sass",
"language-shellscript": "file:packages/language-shellscript",
"language-source": "file:packages/language-source",
"language-sql": "file:packages/language-sql",
"language-text": "file:packages/language-text",
"language-todo": "file:packages/language-todo",
"language-toml": "file:packages/language-toml",
"language-typescript": "file:packages/language-typescript",
"language-xml": "file:packages/language-xml",
"language-yaml": "file:packages/language-yaml",
"less-cache": "1.1.0",
"line-ending-selector": "file:packages/line-ending-selector",
"line-top-index": "0.3.1",
@ -233,39 +233,39 @@
"whitespace": "0.37.8",
"wrap-guide": "0.41.0",
"language-c": "0.60.19",
"language-clojure": "0.22.8",
"language-coffee-script": "0.50.0",
"language-csharp": "1.1.0",
"language-clojure": "file:./packages/language-clojure",
"language-coffee-script": "file:./packages/language-coffee-script",
"language-csharp": "file:./packages/language-csharp",
"language-css": "0.45.1",
"language-gfm": "0.90.8",
"language-git": "0.19.1",
"language-gfm": "file:./packages/language-gfm",
"language-git": "file:./packages/language-git",
"language-go": "0.47.2",
"language-html": "0.53.1",
"language-hyperlink": "0.17.1",
"language-java": "0.32.1",
"language-html": "file:./packages/language-html",
"language-hyperlink": "file:./packages/language-hyperlink",
"language-java": "file:./packages/language-java",
"language-javascript": "0.134.1",
"language-json": "1.0.5",
"language-less": "0.34.3",
"language-make": "0.23.0",
"language-mustache": "0.14.5",
"language-objective-c": "0.16.0",
"language-perl": "0.38.1",
"language-php": "0.48.1",
"language-property-list": "0.9.1",
"language-python": "0.53.6",
"language-ruby": "0.73.0",
"language-ruby-on-rails": "0.25.3",
"language-json": "file:./packages/language-json",
"language-less": "file:./packages/language-less",
"language-make": "file:./packages/language-make",
"language-mustache": "file:./packages/language-mustache",
"language-objective-c": "file:./packages/language-objective-c",
"language-perl": "file:./packages/language-perl",
"language-php": "file:./packages/language-php",
"language-property-list": "file:./packages/language-property-list",
"language-python": "file:./packages/language-python",
"language-ruby": "file:./packages/language-ruby",
"language-ruby-on-rails": "file:./packages/language-ruby-on-rails",
"language-rust-bundled": "file:./packages/language-rust-bundled",
"language-sass": "0.62.1",
"language-shellscript": "0.28.2",
"language-source": "0.9.0",
"language-sql": "0.25.10",
"language-text": "0.7.4",
"language-todo": "0.29.4",
"language-toml": "0.20.0",
"language-typescript": "0.6.3",
"language-xml": "0.35.3",
"language-yaml": "0.32.0"
"language-sass": "file:./packages/language-sass",
"language-shellscript": "file:./packages/language-shellscript",
"language-source": "file:./packages/language-source",
"language-sql": "file:./packages/language-sql",
"language-text": "file:./packages/language-text",
"language-todo": "file:./packages/language-todo",
"language-toml": "file:./packages/language-toml",
"language-typescript": "file:./packages/language-typescript",
"language-xml": "file:./packages/language-xml",
"language-yaml": "file:./packages/language-yaml"
},
"private": true,
"scripts": {

View File

@ -40,40 +40,40 @@ See [RFC 003](https://github.com/atom/atom/blob/master/docs/rfcs/003-consolidate
| **image-view** | [`atom/image-view`][image-view] | [#18274](https://github.com/atom/atom/issues/18274) |
| **incompatible-packages** | [`./incompatible-packages`](./incompatible-packages) | [#17846](https://github.com/atom/atom/issues/17846) |
| **keybinding-resolver** | [`atom/keybinding-resolver`][keybinding-resolver] | [#18275](https://github.com/atom/atom/issues/18275) |
| **language-c** | [`atom/language-c`][language-c] | |
| **language-clojure** | [`atom/language-clojure`][language-clojure] | |
| **language-coffee-script** | [`atom/language-coffee-script`][language-coffee-script] | |
| **language-csharp** | [`atom/language-csharp`][language-csharp] | |
| **language-css** | [`atom/language-css`][language-css] | |
| **language-gfm** | [`atom/language-gfm`][language-gfm] | |
| **language-git** | [`atom/language-git`][language-git] | |
| **language-go** | [`atom/language-go`][language-go] | |
| **language-html** | [`atom/language-html`][language-html] | |
| **language-hyperlink** | [`atom/language-hyperlink`][language-hyperlink] | |
| **language-java** | [`atom/language-java`][language-java] | |
| **language-javascript** | [`atom/language-javascript`][language-javascript] | |
| **language-json** | [`atom/language-json`][language-json] | |
| **language-less** | [`atom/language-less`][language-less] | |
| **language-make** | [`atom/language-make`][language-make] | |
| **language-mustache** | [`atom/language-mustache`][language-mustache] | |
| **language-objective-c** | [`atom/language-objective-c`][language-objective-c] | |
| **language-perl** | [`atom/language-perl`][language-perl] | |
| **language-php** | [`atom/language-php`][language-php] | |
| **language-property-list** | [`atom/language-property-list`][language-property-list] | |
| **language-python** | [`atom/language-python`][language-python] | |
| **language-ruby** | [`atom/language-ruby`][language-ruby] | |
| **language-ruby-on-rails** | [`atom/language-ruby-on-rails`][language-ruby-on-rails] | |
| **language-c** | [`atom/language-c`](./language-c) | |
| **language-clojure** | [`atom/language-clojure`](./language-clojure) | |
| **language-coffee-script** | [`atom/language-coffee-script`](./language-coffee-script) | |
| **language-csharp** | [`atom/language-csharp`](./language-csharp) | |
| **language-css** | [`atom/language-css`](./language-css) | |
| **language-gfm** | [`atom/language-gfm`](./language-gfm) | |
| **language-git** | [`atom/language-git`](./language-git) | |
| **language-go** | [`atom/language-go`](./language-go) | |
| **language-html** | [`atom/language-html`](./language-html) | |
| **language-hyperlink** | [`atom/language-hyperlink`](./language-hyperlink) | |
| **language-java** | [`atom/language-java`](./language-java) | |
| **language-javascript** | [`atom/language-javascript`](./language-javascript) | |
| **language-json** | [`atom/language-json`](./language-json) | |
| **language-less** | [`atom/language-less`](./language-less) | |
| **language-make** | [`atom/language-make`](./language-make) | |
| **language-mustache** | [`atom/language-mustache`](./language-mustache) | |
| **language-objective-c** | [`atom/language-objective-c`](./language-objective-c) | |
| **language-perl** | [`atom/language-perl`](./language-perl) | |
| **language-php** | [`atom/language-php`](./language-php) | |
| **language-property-list** | [`atom/language-property-list`](./language-property-list) | |
| **language-python** | [`atom/language-python`](./language-python) | |
| **language-ruby** | [`atom/language-ruby`](./language-ruby) | |
| **language-ruby-on-rails** | [`atom/language-ruby-on-rails`](./language-ruby-on-rails) | |
| **language-rust-bundled** | [`./language-rust-bundled`](./language-rust-bundled) | |
| **language-sass** | [`atom/language-sass`][language-sass] | |
| **language-shellscript** | [`atom/language-shellscript`][language-shellscript] | |
| **language-source** | [`atom/language-source`][language-source] | |
| **language-sql** | [`atom/language-sql`][language-sql] | |
| **language-text** | [`atom/language-text`][language-text] | |
| **language-todo** | [`atom/language-todo`][language-todo] | |
| **language-toml** | [`atom/language-toml`][language-toml] | |
| **language-typescript** | [`atom/language-typescript`][language-typescript] | |
| **language-xml** | [`atom/language-xml`][language-xml] | |
| **language-yaml** | [`atom/language-yaml`][language-yaml] | |
| **language-sass** | [`atom/language-sass`](./language-sass) | |
| **language-shellscript** | [`atom/language-shellscript`](./language-shellscript) | |
| **language-source** | [`atom/language-source`](./language-source) | |
| **language-sql** | [`atom/language-sql`](./language-sql) | |
| **language-text** | [`atom/language-text`](./language-text) | |
| **language-todo** | [`atom/language-todo`](./language-todo) | |
| **language-toml** | [`atom/language-toml`](./language-toml) | |
| **language-typescript** | [`atom/language-typescript`](./language-typescript) | |
| **language-xml** | [`atom/language-xml`](./language-xml) | |
| **language-yaml** | [`atom/language-yaml`](./language-yaml) | |
| **line-ending-selector** | [`./packages/line-ending-selector`](./line-ending-selector) | [#17847](https://github.com/atom/atom/issues/17847) |
| **link** | [`./link`](./link) | [#17848](https://github.com/atom/atom/issues/17848) |
| **markdown-preview** | [`atom/markdown-preview`][markdown-preview] | |

View File

@ -0,0 +1,15 @@
# Configuration for probot-no-response - https://github.com/probot/no-response
# Number of days of inactivity before an issue is closed for lack of response
daysUntilClose: 28
# Label requiring a response
responseRequiredLabel: more-information-needed
# Comment to post when closing an issue for lack of response. Set to `false` to disable.
closeComment: >
This issue has been automatically closed because there has been no response
to our request for more information from the original author. With only the
information that is currently in the issue, we don't have enough information
to take action. Please reach out if you have or find the answers we need so
that we can investigate further.

View File

@ -0,0 +1,27 @@
name: CI
on: [push]
env:
CI: true
jobs:
Test:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
channel: [stable, beta]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v1
- uses: UziTech/action-setup-atom@v2
with:
version: ${{ matrix.channel }}
- name: Install windows-build-tools
if: ${{ matrix.os == 'windows-latest' }}
run: |
npm i windows-build-tools@4.0.0
- name: Install dependencies
run: apm install
- name: Run tests
run: atom --test spec

1
packages/language-c/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
node_modules

View File

@ -0,0 +1 @@
See the [Atom contributing guide](https://github.com/atom/atom/blob/master/CONTRIBUTING.md)

View File

@ -0,0 +1,40 @@
<!--
Have you read Atom's Code of Conduct? By filing an Issue, you are expected to comply with it, including treating everyone with respect: https://github.com/atom/atom/blob/master/CODE_OF_CONDUCT.md
Do you want to ask a question? Are you looking for support? The Atom message board is the best place for getting support: https://discuss.atom.io
-->
### Prerequisites
* [ ] Put an X between the brackets on this line if you have done all of the following:
* Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode
* Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/
* Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq
* Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom
* Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages
### Description
[Description of the issue]
### Steps to Reproduce
1. [First Step]
2. [Second Step]
3. [and so on...]
**Expected behavior:** [What you expect to happen]
**Actual behavior:** [What actually happens]
**Reproduces how often:** [What percentage of the time does it reproduce?]
### Versions
You can get this information from copy and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running.
### Additional Information
Any additional information, configuration or data that might be necessary to reproduce the issue.

View File

@ -0,0 +1,31 @@
Copyright (c) 2014 GitHub Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------
This package was derived from a TextMate bundle located at
https://github.com/textmate/c.tmbundle and distributed under the following
license, located in `README.mdown`:
Permission to copy, use, modify, sell and distribute this
software is granted. This software is provided "as is" without
express or implied warranty, and with no claim as to its
suitability for any purpose.

View File

@ -0,0 +1,28 @@
### Requirements
* Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion.
* All new code requires tests to ensure against regressions
### Description of the Change
<!--
We must be able to understand the design of your change from this description. If we can't get a good idea of what the code will be doing from the description here, the pull request may be closed at the maintainers' discretion. Keep in mind that the maintainer reviewing this PR may not be familiar with or have worked with the code here recently, so please walk us through the concepts.
-->
### Alternate Designs
<!-- Explain what other alternates were considered and why the proposed version was selected -->
### Benefits
<!-- What benefits will be realized by the code change? -->
### Possible Drawbacks
<!-- What are the possible side-effects or negative impacts of the code change? -->
### Applicable Issues
<!-- Enter any applicable Issues here -->

View File

@ -0,0 +1,10 @@
# C/C++ language support in Atom
![CI Status](https://github.com/atom/language-c/actions/workflows/main.yml/badge.svg)
Adds syntax highlighting and snippets to C/C++ files in Atom.
Originally [converted](http://flight-manual.atom.io/hacking-atom/sections/converting-from-textmate)
from the [C TextMate bundle](https://github.com/textmate/c.tmbundle).
Contributions are greatly appreciated. Please fork this repository and open a
pull request to add snippets, make grammar tweaks, etc.

View File

@ -0,0 +1,37 @@
{
"max_line_length": {
"level": "ignore"
},
"no_empty_param_list": {
"level": "error"
},
"arrow_spacing": {
"level": "error"
},
"no_interpolation_in_single_quotes": {
"level": "error"
},
"no_debugger": {
"level": "error"
},
"prefer_english_operator": {
"level": "error"
},
"colon_assignment_spacing": {
"spacing": {
"left": 0,
"right": 1
},
"level": "error"
},
"braces_spacing": {
"spaces": 0,
"level": "error"
},
"spacing_after_comma": {
"level": "error"
},
"no_stand_alone_at": {
"level": "error"
}
}

View File

@ -0,0 +1,424 @@
'scopeName': 'source.cpp'
'fileTypes': [
'cc'
'cpp'
'cp'
'cxx'
'c++'
'cu'
'cuh'
'h'
'hh'
'hpp'
'hxx'
'h++'
'inl'
'ino'
'ipp'
'tcc'
'tpp'
]
'firstLineMatch': '(?i)-\\*-[^*]*(Mode:\\s*)?C\\+\\+(\\s*;.*?)?\\s*-\\*-'
'name': 'C++'
'patterns': [
{
'include': '#special_block'
}
{
'include': '#strings'
}
{
'match': '\\b(friend|explicit|virtual|override|final|noexcept)\\b'
'name': 'storage.modifier.cpp'
}
{
'match': '\\b(private:|protected:|public:)'
'name': 'storage.modifier.cpp'
}
{
'match': '\\b(catch|operator|try|throw|using)\\b'
'name': 'keyword.control.cpp'
}
{
'match': '\\bdelete\\b(\\s*\\[\\])?|\\bnew\\b(?!])'
'name': 'keyword.control.cpp'
}
{
# Common naming idiom for C++ instanced vars: "fMemberName"
'match': '\\b(f|m)[A-Z]\\w*\\b'
'name': 'variable.other.readwrite.member.cpp'
}
{
'match': '\\bthis\\b'
'name': 'variable.language.this.cpp'
}
{
'match': '\\bnullptr\\b'
'name': 'constant.language.cpp'
}
{
'match': '\\btemplate\\b\\s*'
'name': 'storage.type.template.cpp'
}
{
'match': '\\b(const_cast|dynamic_cast|reinterpret_cast|static_cast)\\b\\s*'
'name': 'keyword.operator.cast.cpp'
}
{
'match': '::'
'name': 'punctuation.separator.namespace.access.cpp'
}
{
'match': '\\b(and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas)\\b'
'name': 'keyword.operator.cpp'
}
{
'match': '\\b(class|decltype|wchar_t|char16_t|char32_t)\\b'
'name': 'storage.type.cpp'
}
{
'match': '\\b(constexpr|export|mutable|typename|thread_local)\\b'
'name': 'storage.modifier.cpp'
}
{
'begin': '''(?x)
(?:
^ | # beginning of line
(?:(?<!else|new|=)) # or word + space before name
)
((?:[A-Za-z_][A-Za-z0-9_]*::)*+~[A-Za-z_][A-Za-z0-9_]*) # actual name
\\s*(\\() # opening bracket
'''
'beginCaptures':
'1':
'name': 'entity.name.function.cpp'
'2':
'name': 'punctuation.definition.parameters.begin.c'
'end': '\\)'
'endCaptures':
'0':
'name': 'punctuation.definition.parameters.end.c'
'name': 'meta.function.destructor.cpp'
'patterns': [
{
'include': '$base'
}
]
}
{
'begin': '''(?x)
(?:
^ | # beginning of line
(?:(?<!else|new|=)) # or word + space before name
)
((?:[A-Za-z_][A-Za-z0-9_]*::)*+~[A-Za-z_][A-Za-z0-9_]*) # actual name
\\s*(\\() # opening bracket
'''
'beginCaptures':
'1':
'name': 'entity.name.function.cpp'
'2':
'name': 'punctuation.definition.parameters.begin.c'
'end': '\\)'
'endCaptures':
'0':
'name': 'punctuation.definition.parameters.end.c'
'name': 'meta.function.destructor.prototype.cpp'
'patterns': [
{
'include': '$base'
}
]
}
{
'include': 'source.c'
}
]
'repository':
'angle_brackets':
'begin': '<'
'end': '>'
'name': 'meta.angle-brackets.cpp'
'patterns': [
{
'include': '#angle_brackets'
}
{
'include': '$base'
}
]
'block':
'begin': '\\{'
'beginCaptures':
'0':
'name': 'punctuation.section.block.begin.bracket.curly.c'
'end': '\\}'
'endCaptures':
'0':
'name': 'punctuation.section.block.end.bracket.curly.c'
'name': 'meta.block.cpp'
'patterns': [
{
'captures':
'1':
'name': 'support.function.any-method.c'
'2':
'name': 'punctuation.definition.parameters.c'
'match': '''(?x)
(
(?!while|for|do|if|else|switch|catch|enumerate|return|r?iterate)
(?:\\b[A-Za-z_][A-Za-z0-9_]*+\\b|::)*+ # actual name
)
\\s*(\\() # opening bracket
'''
'name': 'meta.function-call.c'
}
{
'include': '$base'
}
]
'constructor':
'patterns': [
{
'begin': '''(?x)
(?:^\\s*) # beginning of line
((?!while|for|do|if|else|switch|catch|enumerate|r?iterate)[A-Za-z_][A-Za-z0-9_:]*) # actual name
\\s*(\\() # opening bracket
'''
'beginCaptures':
'1':
'name': 'entity.name.function.cpp'
'2':
'name': 'punctuation.definition.parameters.begin.c'
'end': '\\)'
'endCaptures':
'0':
'name': 'punctuation.definition.parameters.end.c'
'name': 'meta.function.constructor.cpp'
'patterns': [
{
'include': '$base'
}
]
}
{
'begin': '''(?x)
(:)
(
(?=
\\s*[A-Za-z_][A-Za-z0-9_:]* # actual name
\\s* (\\() # opening bracket
)
)
'''
'beginCaptures':
'1':
'name': 'punctuation.definition.parameters.c'
'end': '(?=\\{)'
'name': 'meta.function.constructor.initializer-list.cpp'
'patterns': [
{
'include': '$base'
}
]
}
]
'special_block':
'patterns': [
{
"begin": "\\b(using)\\b\\s*(namespace)\\b\\s*((?:[_A-Za-z][_A-Za-z0-9]*\\b(::)?)*)",
"beginCaptures": {
"1": {
"name": "keyword.control.cpp"
},
"2": {
"name": "storage.type.cpp"
},
"3": {
"name": "entity.name.type.cpp"
}
},
"end": "(;)",
"name": "meta.using-namespace-declaration.cpp"
},
{
'begin': '\\b(namespace)\\b\\s*([_A-Za-z][_A-Za-z0-9]*\\b)?+'
'beginCaptures':
'1':
'name': 'storage.type.cpp'
'2':
'name': 'entity.name.type.cpp'
'captures':
'1':
'name': 'keyword.control.namespace.$2'
'end': '(?<=\\})|(?=(;|,|\\(|\\)|>|\\[|\\]|=))'
'name': 'meta.namespace-block.cpp'
'patterns': [
{
'begin': '\\{'
'beginCaptures':
'0':
'name': 'punctuation.definition.scope.cpp'
'end': '\\}'
'endCaptures':
'0':
'name': 'punctuation.definition.scope.cpp'
'patterns': [
{
'include': '#special_block'
}
{
'include': '#constructor'
}
{
'include': '$base'
}
]
}
{
'include': '$base'
}
]
}
{
'begin': '\\b(class|struct)\\b\\s*([_A-Za-z][_A-Za-z0-9]*\\b)?+(\\s*:\\s*(public|protected|private)\\s*([_A-Za-z][_A-Za-z0-9]*\\b)((\\s*,\\s*(public|protected|private)\\s*[_A-Za-z][_A-Za-z0-9]*\\b)*))?'
'beginCaptures':
'1':
'name': 'storage.type.cpp'
'2':
'name': 'entity.name.type.cpp'
'4':
'name': 'storage.type.modifier.cpp'
'5':
'name': 'entity.name.type.inherited.cpp'
'6':
'patterns': [
{
'match': '(public|protected|private)'
'name': 'storage.type.modifier.cpp'
}
{
'match': '[_A-Za-z][_A-Za-z0-9]*'
'name': 'entity.name.type.inherited.cpp'
}
]
'end': '(?<=\\})|(?=(;|\\(|\\)|>|\\[|\\]|=))'
'name': 'meta.class-struct-block.cpp'
'patterns': [
{
'include': '#angle_brackets'
}
{
'begin': '\\{'
'beginCaptures':
'0':
'name': 'punctuation.section.block.begin.bracket.curly.cpp'
'end': '(\\})(\\s*\\n)?'
'endCaptures':
'1':
'name': 'punctuation.section.block.end.bracket.curly.cpp'
'2':
'name': 'invalid.illegal.you-forgot-semicolon.cpp'
'patterns': [
{
'include': '#special_block'
}
{
'include': '#constructor'
}
{
'include': '$base'
}
]
}
{
'include': '$base'
}
]
}
{
'begin': '\\b(extern)(?=\\s*")'
'beginCaptures':
'1':
'name': 'storage.modifier.cpp'
'end': '(?<=\\})|(?=\\w)|(?=\\s*#\\s*endif\\b)'
'name': 'meta.extern-block.cpp'
'patterns': [
{
'begin': '\\{'
'beginCaptures':
'0':
'name': 'punctuation.section.block.begin.bracket.curly.c'
'end': '\\}|(?=\\s*#\\s*endif\\b)'
'endCaptures':
'0':
'name': 'punctuation.section.block.end.bracket.curly.c'
'patterns': [
{
'include': '#special_block'
}
{
'include': '$base'
}
]
}
{
'include': '$base'
}
]
}
]
'strings':
'patterns': [
{
'begin': '(u|u8|U|L)?"'
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.cpp'
'1':
'name': 'meta.encoding.cpp'
'end': '"'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.cpp'
'name': 'string.quoted.double.cpp'
'patterns': [
{
'match': '\\\\u\\h{4}|\\\\U\\h{8}'
'name': 'constant.character.escape.cpp'
}
{
'match': '\\\\[\'"?\\\\abfnrtv]'
'name': 'constant.character.escape.cpp'
}
{
'match': '\\\\[0-7]{1,3}'
'name': 'constant.character.escape.cpp'
}
{
'match': '\\\\x\\h+'
'name': 'constant.character.escape.cpp'
}
{
'include': 'source.c#string_placeholder'
}
]
}
{
'begin': '(u|u8|U|L)?R"(?:([^ ()\\\\\\t]{0,16})|([^ ()\\\\\\t]*))\\('
'beginCaptures':
'0':
'name': 'punctuation.definition.string.begin.cpp'
'1':
'name': 'meta.encoding.cpp'
'3':
'name': 'invalid.illegal.delimiter-too-long.cpp'
'end': '\\)\\2(\\3)"'
'endCaptures':
'0':
'name': 'punctuation.definition.string.end.cpp'
'1':
'name': 'invalid.illegal.delimiter-too-long.cpp'
'name': 'string.quoted.double.raw.cpp'
}
]

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,185 @@
name: 'C'
scopeName: 'source.c'
type: 'tree-sitter'
parser: 'tree-sitter-c'
injectionRegex: 'c|C'
fileTypes: [
'h'
'c'
'h.in'
]
folds: [
{
type: ['comment', 'preproc_arg']
}
{
type: ['preproc_if', 'preproc_ifdef', 'preproc_elif'],
end: {type: ['preproc_else', 'preproc_elif']}
}
{
type: ['preproc_if', 'preproc_ifdef'],
end: {index: -1}
}
{
type: ['preproc_else', 'preproc_elif']
start: {index: 0}
}
{
type: [
'enumerator_list'
'compound_statement'
'declaration_list'
'field_declaration_list'
'parameter_list'
'argument_list'
'initializer_list'
'parenthesized_expression'
'template_parameter_list'
'template_argument_list'
]
start: {index: 0}
end: {index: -1}
}
{
type: 'case_statement'
start: {index: 0}
end: {type: 'break_statement', index: -1}
}
{
type: 'case_statement'
start: {index: 0}
}
]
comments:
start: '// '
scopes:
'translation_unit': 'source.c'
'comment': 'comment.block'
'identifier': [
{match: '^[A-Z\\d_]+$', scopes: 'constant.other'}
]
'"#if"': 'keyword.control.directive'
'"#ifdef"': 'keyword.control.directive'
'"#ifndef"': 'keyword.control.directive'
'"#elif"': 'keyword.control.directive'
'"#else"': 'keyword.control.directive'
'"#endif"': 'keyword.control.directive'
'"#define"': 'keyword.control.directive'
'"#include"': 'keyword.control.directive'
'preproc_directive': 'keyword.control.directive'
'"if"': 'keyword.control'
'"else"': 'keyword.control'
'"do"': 'keyword.control'
'"for"': 'keyword.control'
'"while"': 'keyword.control'
'"break"': 'keyword.control'
'"continue"': 'keyword.control'
'"return"': 'keyword.control'
'"switch"': 'keyword.control'
'"case"': 'keyword.control'
'"default"': 'keyword.control'
'"goto"': 'keyword.control'
'"struct"': 'keyword.control'
'"enum"': 'keyword.control'
'"union"': 'keyword.control'
'"typedef"': 'keyword.control'
'preproc_function_def > identifier:nth-child(1)': 'entity.name.function.preprocessor'
'preproc_arg': 'meta.preprocessor.macro'
'''
call_expression > identifier,
call_expression > field_expression > field_identifier,
function_declarator > identifier
''': 'entity.name.function'
'statement_identifier': 'constant.variable'
'field_identifier': 'variable.other.member'
'type_identifier': 'support.storage.type'
'primitive_type': 'support.storage.type'
'"signed"': 'support.storage.type'
'"unsigned"': 'support.storage.type'
'"short"': 'support.storage.type'
'"long"': 'support.storage.type'
'char_literal': 'string.quoted.single'
'string_literal': 'string.quoted.double'
'system_lib_string': 'string.quoted.other'
'escape_sequence': 'constant.character.escape'
'number_literal': 'constant.numeric.decimal'
'null': 'constant.language.null'
'true': 'constant.language.boolean'
'false': 'constant.language.boolean'
'auto': 'storage.modifier'
'"extern"': 'storage.modifier'
'"register"': 'storage.modifier'
'"static"': 'storage.modifier'
'"inline"': 'storage.modifier'
'"const"': 'storage.modifier'
'"volatile"': 'storage.modifier'
'"restrict"': 'storage.modifier'
'"_Atomic"': 'storage.modifier'
'function_specifier': 'storage.modifier'
'";"': 'punctuation.terminator.statement'
'"["': 'punctuation.definition.begin.bracket.square'
'"]"': 'punctuation.definition.end.bracket.square'
'","': 'punctuation.separator.delimiter'
'char_literal > "\'"': 'punctuation.definition.string'
'string_literal > "\\""': 'punctuation.definition.string'
'"{"': 'punctuation.section.block.begin.bracket.curly'
'"}"': 'punctuation.section.block.end.bracket.curly'
'"("': 'punctuation.section.parens.begin.bracket.round'
'")"': 'punctuation.section.parens.end.bracket.round'
'"sizeof"': 'keyword.operator.sizeof'
'"."': 'keyword.operator.member'
'"->"': 'keyword.operator.member'
'"*"': 'keyword.operator'
'"-"': 'keyword.operator'
'"+"': 'keyword.operator'
'"/"': 'keyword.operator'
'"%"': 'keyword.operator'
'"++"': 'keyword.operator'
'"--"': 'keyword.operator'
'"=="': 'keyword.operator'
'"!"': 'keyword.operator'
'"!="': 'keyword.operator'
'"<"': 'keyword.operator'
'">"': 'keyword.operator'
'">="': 'keyword.operator'
'"<="': 'keyword.operator'
'"&&"': 'keyword.operator'
'"||"': 'keyword.operator'
'"&"': 'keyword.operator'
'"|"': 'keyword.operator'
'"^"': 'keyword.operator'
'"~"': 'keyword.operator'
'"<<"': 'keyword.operator'
'">>"': 'keyword.operator'
'"="': 'keyword.operator'
'"+="': 'keyword.operator'
'"-="': 'keyword.operator'
'"*="': 'keyword.operator'
'"/="': 'keyword.operator'
'"%="': 'keyword.operator'
'"<<="': 'keyword.operator'
'">>="': 'keyword.operator'
'"&="': 'keyword.operator'
'"^="': 'keyword.operator'
'"|="': 'keyword.operator'
'"?"': 'keyword.operator'
'":"': 'keyword.operator'

View File

@ -0,0 +1,251 @@
name: 'C++'
scopeName: 'source.cpp'
type: 'tree-sitter'
parser: 'tree-sitter-cpp'
injectionRegex: '(c|C)(\\+\\+|pp|PP)'
fileTypes: [
'cc'
'cpp'
'cp'
'cxx'
'c++'
'cu'
'cuh'
'h'
'hh'
'hpp'
'hxx'
'h++'
'inl'
'ino'
'ipp'
'tcc'
'tpp'
]
contentRegex: '\n\\s*(namespace|class|template)\\s+'
folds: [
{
type: ['comment', 'preproc_arg']
}
{
type: ['preproc_if', 'preproc_ifdef', 'preproc_elif'],
end: {type: ['preproc_else', 'preproc_elif']}
}
{
type: ['preproc_if', 'preproc_ifdef'],
end: {index: -1}
}
{
type: ['preproc_else', 'preproc_elif']
start: {index: 0}
}
{
type: [
'enumerator_list'
'compound_statement'
'declaration_list'
'field_declaration_list'
'parameter_list'
'argument_list'
'initializer_list'
'parenthesized_expression'
'template_parameter_list'
'template_argument_list'
]
start: {index: 0}
end: {index: -1}
}
{
type: 'case_statement'
start: {index: 0}
end: {type: 'break_statement', index: -1}
}
{
type: 'case_statement'
start: {index: 0}
}
]
comments:
start: '// '
scopes:
'translation_unit': 'source.cpp'
'comment': 'comment.block'
'identifier': [
{match: '^[A-Z\\d_]+$', scopes: 'constant.other'}
]
'"#if"': 'keyword.control.directive'
'"#ifdef"': 'keyword.control.directive'
'"#ifndef"': 'keyword.control.directive'
'"#elif"': 'keyword.control.directive'
'"#else"': 'keyword.control.directive'
'"#endif"': 'keyword.control.directive'
'"#define"': 'keyword.control.directive'
'"#include"': 'keyword.control.directive'
'preproc_directive': 'keyword.control.directive'
'"if"': 'keyword.control'
'"else"': 'keyword.control'
'"do"': 'keyword.control'
'"for"': 'keyword.control'
'"while"': 'keyword.control'
'"break"': 'keyword.control'
'"continue"': 'keyword.control'
'"return"': 'keyword.control'
'"switch"': 'keyword.control'
'"case"': 'keyword.control'
'"default"': 'keyword.control'
'"goto"': 'keyword.control'
'"struct"': 'keyword.control'
'"enum"': 'keyword.control'
'"union"': 'keyword.control'
'"typedef"': 'keyword.control'
'"class"': 'keyword.control'
'"using"': 'keyword.control'
'"namespace"': 'keyword.control'
'"template"': 'keyword.control'
'"typename"': 'keyword.control'
'"try"': 'keyword.control'
'"catch"': 'keyword.control'
'"throw"': 'keyword.control'
'"__attribute__"': 'keyword.attribute'
'preproc_function_def > identifier:nth-child(1)': 'entity.name.function.preprocessor'
  'preproc_arg': 'meta.preprocessor.macro'
'template_function > identifier': [
{
match: '^(static|const|dynamic|reinterpret)_cast$'
scopes: 'keyword.operator'
}
]
'''
call_expression > identifier,
call_expression > field_expression > field_identifier,
call_expression > scoped_identifier > identifier,
template_function > identifier,
template_function > scoped_identifier > identifier,
template_method > field_identifier,
function_declarator > identifier,
function_declarator > field_identifier,
function_declarator > scoped_identifier > identifier,
destructor_name > identifier
''': 'entity.name.function'
'statement_identifier': 'constant.variable'
'field_identifier': 'variable.other.member'
'type_identifier': 'support.storage.type'
'primitive_type': 'support.storage.type'
'"unsigned"': 'support.storage.type'
'"signed"': 'support.storage.type'
'"short"': 'support.storage.type'
'"long"': 'support.storage.type'
'auto': 'support.storage.type'
'char_literal': 'string.quoted.single'
'string_literal': 'string.quoted.double'
'system_lib_string': 'string.quoted.other'
'raw_string_literal': 'string.quoted.other'
'escape_sequence': 'constant.character.escape'
'preproc_include > string_literal > escape_sequence': 'string.quoted.double'
'number_literal': 'constant.numeric.decimal'
'null': 'constant.language.null'
'nullptr': 'constant.language.null'
'true': 'constant.language.boolean'
'false': 'constant.language.boolean'
'"extern"': 'storage.modifier'
'"static"': 'storage.modifier'
'"register"': 'storage.modifier'
'"friend"': 'storage.modifier'
'"inline"': 'storage.modifier'
'"explicit"': 'storage.modifier'
'"const"': 'storage.modifier'
'"constexpr"': 'storage.modifier'
'"volatile"': 'storage.modifier'
'"restrict"': 'storage.modifier'
'function_specifier': 'storage.modifier'
'"public"': 'storage.modifier'
'"private"': 'storage.modifier'
'"protected"': 'storage.modifier'
'"final"': 'storage.modifier'
'"override"': 'storage.modifier'
'"virtual"': 'storage.modifier'
'"noexcept"': 'storage.modifier'
'"mutable"': 'storage.modifier'
'";"': 'punctuation.terminator.statement'
'"["': 'punctuation.definition.begin.bracket.square'
'"]"': 'punctuation.definition.end.bracket.square'
'access_specifier > ":"': 'punctuation.definition.visibility.colon'
'base_class_clause > ":"': 'punctuation.definition.inheritance.colon'
'base_class_clause > ","': 'punctuation.definition.separator.class.comma'
'field_declaration > ","': 'punctuation.separator.delimiter'
'parameter_list > ","': 'punctuation.separator.delimiter'
'field_initializer_list > ":"': 'punctuation.definition.initialization.colon'
'field_initializer_list > ","': 'punctuation.separator.delimiter'
'"::"': 'punctuation.separator.method.double-colon'
'template_parameter_list > "<"': 'punctuation.definition.template.bracket.angle'
'template_parameter_list > ">"': 'punctuation.definition.template.bracket.angle'
'template_argument_list > ">"': 'punctuation.definition.template.bracket.angle'
'template_argument_list > "<"': 'punctuation.definition.template.bracket.angle'
'char_literal > "\'"': 'punctuation.definition.string'
'string_literal > "\\""': 'punctuation.definition.string'
'"{"': 'punctuation.section.block.begin.bracket.curly'
'"}"': 'punctuation.section.block.end.bracket.curly'
'"("': 'punctuation.section.parens.begin.bracket.round'
'")"': 'punctuation.section.parens.end.bracket.round'
'"sizeof"': 'keyword.operator.sizeof'
'"new"': 'keyword.operator'
'"delete"': 'keyword.operator'
'"."': 'keyword.operator.member'
'"->"': 'keyword.operator.member'
'"*"': 'keyword.operator'
'"-"': 'keyword.operator'
'"+"': 'keyword.operator'
'"/"': 'keyword.operator'
'"%"': 'keyword.operator'
'"++"': 'keyword.operator'
'"--"': 'keyword.operator'
'"=="': 'keyword.operator'
'"!"': 'keyword.operator'
'"!="': 'keyword.operator'
'relational_expression > "<"': 'keyword.operator'
'relational_expression > ">"': 'keyword.operator'
'">="': 'keyword.operator'
'"<="': 'keyword.operator'
'"&&"': 'keyword.operator'
'"||"': 'keyword.operator'
'"&"': 'keyword.operator'
'"|"': 'keyword.operator'
'"^"': 'keyword.operator'
'"~"': 'keyword.operator'
'"<<"': 'keyword.operator'
'">>"': 'keyword.operator'
'"="': 'keyword.operator'
'"+="': 'keyword.operator'
'"-="': 'keyword.operator'
'"*="': 'keyword.operator'
'"/="': 'keyword.operator'
'"%="': 'keyword.operator'
'"<<="': 'keyword.operator'
'">>="': 'keyword.operator'
'"&="': 'keyword.operator'
'"^="': 'keyword.operator'
'"|="': 'keyword.operator'
'"?"': 'keyword.operator'
'conditional_expression > ":"': 'keyword.operator'

View File

@ -0,0 +1,12 @@
exports.activate = function () {
// Highlight macro bodies as C/C++
for (const language of ['c', 'cpp']) {
for (const nodeType of ['preproc_def', 'preproc_function_def']) {
atom.grammars.addInjectionPoint(`source.${language}`, {
type: nodeType,
language (node) { return language },
content (node) { return node.lastNamedChild }
})
}
}
}

398
packages/language-c/package-lock.json generated Normal file
View File

@ -0,0 +1,398 @@
{
"name": "language-c",
"version": "0.60.20",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
      "version": "0.60.20",
"license": "MIT",
"dependencies": {
"tree-sitter-c": "0.19.0",
"tree-sitter-cpp": "0.19.0"
},
"devDependencies": {
"coffeelint": "^1.10.1"
},
"engines": {
"atom": "*",
"node": "*"
}
},
"node_modules/balanced-match": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=",
"dev": true
},
"node_modules/brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dev": true,
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"node_modules/coffee-script": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.11.1.tgz",
"integrity": "sha1-vxxHrWREOg2V0S3ysUfMCk2q1uk=",
"deprecated": "CoffeeScript on NPM has moved to \"coffeescript\" (no hyphen)",
"dev": true,
"bin": {
"cake": "bin/cake",
"coffee": "bin/coffee"
},
"engines": {
"node": ">=0.8.0"
}
},
"node_modules/coffeelint": {
"version": "1.16.2",
"resolved": "https://registry.npmjs.org/coffeelint/-/coffeelint-1.16.2.tgz",
"integrity": "sha512-6mzgOo4zb17WfdrSui/cSUEgQ0AQkW3gXDht+6lHkfkqGUtSYKwGdGcXsDfAyuScVzTlTtKdfwkAlJWfqul7zg==",
"dev": true,
"dependencies": {
"coffee-script": "~1.11.0",
"glob": "^7.0.6",
"ignore": "^3.0.9",
"optimist": "^0.6.1",
"resolve": "^0.6.3",
"strip-json-comments": "^1.0.2"
},
"bin": {
"coffeelint": "bin/coffeelint"
},
"engines": {
"node": ">=0.8.0",
"npm": ">=1.3.7"
}
},
"node_modules/concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
"dev": true
},
"node_modules/fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=",
"dev": true
},
"node_modules/glob": {
"version": "7.1.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz",
"integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==",
"dev": true,
"dependencies": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.0.4",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
},
"engines": {
"node": "*"
}
},
"node_modules/ignore": {
"version": "3.3.10",
"resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz",
"integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==",
"dev": true
},
"node_modules/inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
"dev": true,
"dependencies": {
"once": "^1.3.0",
"wrappy": "1"
}
},
"node_modules/inherits": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
"integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=",
"dev": true
},
"node_modules/minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"dev": true,
"dependencies": {
"brace-expansion": "^1.1.7"
},
"engines": {
"node": "*"
}
},
"node_modules/minimist": {
"version": "0.0.10",
      "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz",
"integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=",
"dev": true
},
"node_modules/nan": {
"version": "2.14.0",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz",
"integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg=="
},
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
"dev": true,
"dependencies": {
"wrappy": "1"
}
},
"node_modules/optimist": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz",
"integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=",
"dev": true,
"dependencies": {
"minimist": "~0.0.1",
"wordwrap": "~0.0.2"
}
},
"node_modules/path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=",
"dev": true,
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/resolve": {
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-0.6.3.tgz",
"integrity": "sha1-3ZV5gufnNt699TtYpN2RdUV13UY=",
"dev": true
},
"node_modules/strip-json-comments": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-1.0.4.tgz",
"integrity": "sha1-HhX7ysl9Pumb8tc7TGVrCCu6+5E=",
"dev": true,
"bin": {
"strip-json-comments": "cli.js"
},
"engines": {
"node": ">=0.8.0"
}
},
"node_modules/tree-sitter-c": {
"version": "0.19.0",
"resolved": "https://registry.npmjs.org/tree-sitter-c/-/tree-sitter-c-0.19.0.tgz",
"integrity": "sha512-6wlHAZRUmibYwTv4XdLhiMZ3EXYLZdvkiRIXJJ1rMCSMFwhaaLnBjyFw7aILJT1x6V7oD7I3VNOOniwKHV9osA==",
"hasInstallScript": true,
"dependencies": {
"nan": "^2.14.0"
}
},
"node_modules/tree-sitter-cpp": {
"version": "0.19.0",
"resolved": "https://registry.npmjs.org/tree-sitter-cpp/-/tree-sitter-cpp-0.19.0.tgz",
"integrity": "sha512-nCbHpC3ZaLnuvfHVtC40M5Mzc4QA11Zogz/lLUMBirysxmc9ResCN+gNMWjl7OrG4RPRSd0wQbjg9z6UTtl0SQ==",
"hasInstallScript": true,
"dependencies": {
"nan": "^2.14.0"
}
},
"node_modules/wordwrap": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz",
"integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=",
"dev": true,
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=",
"dev": true
}
},
"dependencies": {
"balanced-match": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=",
"dev": true
},
"brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dev": true,
"requires": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"coffee-script": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.11.1.tgz",
"integrity": "sha1-vxxHrWREOg2V0S3ysUfMCk2q1uk=",
"dev": true
},
"coffeelint": {
"version": "1.16.2",
"resolved": "https://registry.npmjs.org/coffeelint/-/coffeelint-1.16.2.tgz",
"integrity": "sha512-6mzgOo4zb17WfdrSui/cSUEgQ0AQkW3gXDht+6lHkfkqGUtSYKwGdGcXsDfAyuScVzTlTtKdfwkAlJWfqul7zg==",
"dev": true,
"requires": {
"coffee-script": "~1.11.0",
"glob": "^7.0.6",
"ignore": "^3.0.9",
"optimist": "^0.6.1",
"resolve": "^0.6.3",
"strip-json-comments": "^1.0.2"
}
},
"concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
"dev": true
},
"fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=",
"dev": true
},
"glob": {
"version": "7.1.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz",
"integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==",
"dev": true,
"requires": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.0.4",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
}
},
"ignore": {
"version": "3.3.10",
"resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz",
"integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==",
"dev": true
},
"inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
"dev": true,
"requires": {
"once": "^1.3.0",
"wrappy": "1"
}
},
"inherits": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
"integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=",
"dev": true
},
"minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"dev": true,
"requires": {
"brace-expansion": "^1.1.7"
}
},
"minimist": {
"version": "0.0.10",
      "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz",
"integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=",
"dev": true
},
"nan": {
"version": "2.14.0",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz",
"integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg=="
},
"once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
"dev": true,
"requires": {
"wrappy": "1"
}
},
"optimist": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz",
"integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=",
"dev": true,
"requires": {
"minimist": "~0.0.1",
"wordwrap": "~0.0.2"
}
},
"path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=",
"dev": true
},
"resolve": {
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-0.6.3.tgz",
"integrity": "sha1-3ZV5gufnNt699TtYpN2RdUV13UY=",
"dev": true
},
"strip-json-comments": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-1.0.4.tgz",
"integrity": "sha1-HhX7ysl9Pumb8tc7TGVrCCu6+5E=",
"dev": true
},
"tree-sitter-c": {
"version": "0.19.0",
"resolved": "https://registry.npmjs.org/tree-sitter-c/-/tree-sitter-c-0.19.0.tgz",
"integrity": "sha512-6wlHAZRUmibYwTv4XdLhiMZ3EXYLZdvkiRIXJJ1rMCSMFwhaaLnBjyFw7aILJT1x6V7oD7I3VNOOniwKHV9osA==",
"requires": {
"nan": "^2.14.0"
}
},
"tree-sitter-cpp": {
"version": "0.19.0",
"resolved": "https://registry.npmjs.org/tree-sitter-cpp/-/tree-sitter-cpp-0.19.0.tgz",
"integrity": "sha512-nCbHpC3ZaLnuvfHVtC40M5Mzc4QA11Zogz/lLUMBirysxmc9ResCN+gNMWjl7OrG4RPRSd0wQbjg9z6UTtl0SQ==",
"requires": {
"nan": "^2.14.0"
}
},
"wordwrap": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz",
"integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=",
"dev": true
},
"wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=",
"dev": true
}
}
}

View File

@ -0,0 +1,29 @@
{
"version": "0.60.20",
"name": "language-c",
"description": "Atom language support for C/C++",
"keywords": [
"tree-sitter"
],
"main": "lib/main",
"homepage": "https://atom.github.io/language-c",
"repository": {
"type": "git",
"url": "https://github.com/atom/language-c.git"
},
"license": "MIT",
"bugs": {
"url": "https://github.com/atom/language-c/issues"
},
"engines": {
"atom": "*",
"node": "*"
},
"dependencies": {
"tree-sitter-c": "0.19.0",
"tree-sitter-cpp": "0.19.0"
},
"devDependencies": {
"coffeelint": "^1.10.1"
}
}

View File

@ -0,0 +1,19 @@
'.source.c, .source.cpp, .source.objc, .source.objcpp':
'editor':
'commentStart': '// '
'increaseIndentPattern': '(?x)
^ .* \\{ [^}"\']* $
|^ .* \\( [^\\)"\']* $
|^ \\s* (public|private|protected): \\s* $
|^ \\s* @(public|private|protected) \\s* $
|^ \\s* \\{ \\} $
'
'decreaseIndentPattern': '(?x)
^ \\s* (\\s* /[*] .* [*]/ \\s*)* \\}
|^ \\s* (\\s* /[*] .* [*]/ \\s*)* \\)
|^ \\s* (public|private|protected): \\s* $
|^ \\s* @(public|private|protected) \\s* $
'
'.source.c, .source.cpp':
'editor':
'foldEndPattern': '(?<!\\*)\\*\\*/|^\\s*\\}'

View File

@ -0,0 +1,113 @@
'.source.c, .source.cpp, .source.objc, .source.objcpp':
'#ifndef … #define … #endif':
'prefix': 'def'
'body': '#ifndef ${1:SYMBOL}\n#define $1 ${2:value}\n#endif'
'#include <>':
'prefix': 'Inc'
'body': '#include <${1:.h}>'
'#include ""':
'prefix': 'inc'
'body': '#include "${1:.h}"'
'#pragma mark':
'prefix': 'mark'
'body': '#if 0\n${1:#pragma mark -\n}#pragma mark $2\n#endif\n\n$0'
'main()':
'prefix': 'main'
'body': 'int main(int argc, char const *argv[]) {\n\t${1:/* code */}\n\treturn 0;\n}'
'For Loop':
'prefix': 'for'
'body': 'for (size_t ${1:i} = 0; ${1:i} < ${2:count}; ${1:i}${3:++}) {\n\t${4:/* code */}\n}'
'Header Include-Guard':
'prefix': 'once'
'body': '#ifndef ${1:SYMBOL}\n#define $1\n\n${2}\n\n#endif /* end of include guard: $1 */\n'
'Shared Pointer':
'prefix': 'sp'
'body': 'typedef std::shared_ptr<${2:${1:my_type}_t}> ${3:${4:my_type}_ptr};'
'Typedef':
'prefix': 'td'
'body': 'typedef ${1:int} ${2:MyCustomType};'
'Do While Loop':
'prefix': 'do'
'body': 'do {\n\t${0:/* code */}\n} while(${1:/* condition */});'
'While Loop':
'prefix': 'while'
'body': 'while (${1:/* condition */}) {\n\t${2:/* code */}\n}'
'fprintf':
'prefix': 'fprintf'
'body': 'fprintf(${1:stderr}, "${2:%s}\\\\n", $3);$4'
'If Condition':
'prefix': 'if'
'body': 'if (${1:/* condition */}) {\n\t${2:/* code */}\n}'
'If Else':
'prefix': 'ife'
'body': 'if (${1:/* condition */}) {\n\t${2:/* code */}\n} else {\n\t${3:/* code */}\n}'
'If ElseIf':
'prefix': 'iff'
'body': 'if (${1:/* condition */}) {\n\t${2:/* code */}\n} else if (${3:/* condition */}) {\n\t${4:/* code */}\n}'
'If ElseIf Else':
'prefix': 'iffe'
'body': 'if (${1:/* condition */}) {\n\t${2:/* code */}\n} else if (${3:/* condition */}) {\n\t${4:/* code */}\n} else {\n\t${5:/* code */}\n}'
'Switch Statement':
'prefix': 'switch'
'body': 'switch (${1:/* expression */}) {\n\tcase ${2:/* value */}:\n}'
'case':
'prefix': 'cs'
'body': 'case ${1:/* value */}:$0'
'printf':
'prefix': 'printf'
'body': 'printf("${1:%s}\\\\n", $2);$3'
'scanf':
'prefix': 'scanf'
'body': 'scanf(\"${1:%s}\\\\n\", $2);$3'
'Struct':
'prefix': 'st'
'body': 'struct ${1:name_t} {\n\t${2:/* data */}\n};'
'void':
'prefix': 'void'
'body': 'void ${1:name}(${2:/* arguments */}) {\n\t${3:/* code */}\n}'
'any function':
'prefix': 'func'
'body': '${1:int} ${2:name}(${3:/* arguments */}) {\n\t${5:/* code */}\n\treturn ${4:0};\n}'
'write file':
'prefix': 'wf'
'body': 'FILE *${1:fp};\n${1:fp} = fopen ("${2:filename.txt}","w");\nif (${1:fp}!=NULL)\n{\n\tfprintf(${1:fp},"${3:Some String\\\\n}");\n\tfclose (${1:fp});\n}'
'read file':
'prefix': 'rf'
'body': 'FILE *${1:fp};\n${1:fp} = fopen ("${2:filename.txt}","r");\nif (${1:fp}!=NULL)\n{\n\tfscanf(${1:fp},"${3:Some String\\\\n}", ${3:&var});\n\tfclose (${1:fp});\n}'
'.source.cpp, .source.objcpp':
'Enumeration':
'prefix': 'enum'
'body': 'enum ${1:name} { $0 };'
'Class':
'prefix': 'cl'
'body': 'class ${1:name_t} {\nprivate:\n\t${0:/* data */}\n\npublic:\n\t${1:name_t} (${2:arguments});\n\tvirtual ~${1:name_t} ();\n};'
'Namespace':
'prefix': 'ns'
'body': 'namespace ${1:name} {\n\t$2\n} /* $1 */'
'cout':
'prefix': 'cout'
'body': 'std::cout << \"${1:/* message */}\" << \'\\\\n\';'
'cin':
'prefix': 'cin'
'body': 'std::cin >> ${1:/* variable */};'
'cerr':
'prefix': 'cerr'
'body': 'std::cerr << \"${1:/* error message */}\" << \'\\\\n\';'
'std::map':
'prefix': 'map'
'body': 'std::map<${1:key}, ${2:value}> map$3;'
'std::string':
'prefix': 'str'
'body': 'std::string'
'std::vector':
'prefix': 'vector'
'body': 'std::vector<${1:int}> v$2;'
'template <typename>':
'prefix': 'tp'
'body': 'template <typename ${1:T}>'
'output file':
'prefix': 'outf'
'body': 'std::ofstream ${1:afile}("${2:filename.txt}", std::ios::out);\nif (${1:afile}.is_open()) {\n\t${1:afile} << "${3:This is a line.}\\\\n";\n\t${1:afile}.close();\n}'
'input file':
'prefix': 'inf'
'body': 'std::ifstream ${1:afile}("${2:filename.txt}", std::ios::in);\nif (${1:afile}.is_open()) {\n\tstd::string line;\n\twhile (std::getline(${1:afile}, line)) {\n\t\tstd::cout << line << \'\\\\n\';\n\t}\n\t${1:afile}.close();\n}\nelse {\n\tstd::cerr << "Unable to open file\\\\n";\n}'

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1 @@
spec/fixtures

View File

@ -0,0 +1,15 @@
# Configuration for probot-no-response - https://github.com/probot/no-response
# Number of days of inactivity before an issue is closed for lack of response
daysUntilClose: 28
# Label requiring a response
responseRequiredLabel: more-information-needed
# Comment to post when closing an issue for lack of response. Set to `false` to disable.
closeComment: >
This issue has been automatically closed because there has been no response
to our request for more information from the original author. With only the
information that is currently in the issue, we don't have enough information
to take action. Please reach out if you have or find the answers we need so
that we can investigate further.

View File

@ -0,0 +1,23 @@
name: CI
on: [push]
env:
CI: true
jobs:
Test:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
channel: [stable, beta]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v1
- uses: UziTech/action-setup-atom@v2
with:
version: ${{ matrix.channel }}
- name: Install dependencies
run: apm install
- name: Run tests
run: atom --test spec

1
packages/language-clojure/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
node_modules

View File

@ -0,0 +1,40 @@
<!--
Have you read Atom's Code of Conduct? By filing an Issue, you are expected to comply with it, including treating everyone with respect: https://github.com/atom/atom/blob/master/CODE_OF_CONDUCT.md
Do you want to ask a question? Are you looking for support? The Atom message board is the best place for getting support: https://discuss.atom.io
-->
### Prerequisites
* [ ] Put an X between the brackets on this line if you have done all of the following:
* Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode
* Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/
* Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq
* Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom
* Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages
### Description
[Description of the issue]
### Steps to Reproduce
1. [First Step]
2. [Second Step]
3. [and so on...]
**Expected behavior:** [What you expect to happen]
**Actual behavior:** [What actually happens]
**Reproduces how often:** [What percentage of the time does it reproduce?]
### Versions
You can get this information from copy and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running.
### Additional Information
Any additional information, configuration or data that might be necessary to reproduce the issue.

View File

@ -0,0 +1,48 @@
Copyright (c) 2014 GitHub Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------
This package was derived from a TextMate bundle located at
https://github.com/mmcgrana/textmate-clojure and distributed under the
following license, located in `LICENSE.md`:
The MIT License (MIT)
Copyright (c) 2010- Mark McGranaghan
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,28 @@
### Requirements
* Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion.
* All new code requires tests to ensure against regressions
### Description of the Change
<!--
We must be able to understand the design of your change from this description. If we can't get a good idea of what the code will be doing from the description here, the pull request may be closed at the maintainers' discretion. Keep in mind that the maintainer reviewing this PR may not be familiar with or have worked with the code here recently, so please walk us through the concepts.
-->
### Alternate Designs
<!-- Explain what other alternates were considered and why the proposed version was selected -->
### Benefits
<!-- What benefits will be realized by the code change? -->
### Possible Drawbacks
<!-- What are the possible side-effects or negative impacts of the code change? -->
### Applicable Issues
<!-- Enter any applicable Issues here -->

View File

@ -0,0 +1,9 @@
# Clojure language support in Atom
[![macOS Build Status](https://travis-ci.org/atom/language-clojure.svg?branch=master)](https://travis-ci.org/atom/language-clojure) [![Build status](https://ci.appveyor.com/api/projects/status/6kd5fs48y5hixde6/branch/master?svg=true)](https://ci.appveyor.com/project/Atom/language-clojure/branch/master) [![Dependency Status](https://david-dm.org/atom/language-clojure.svg)](https://david-dm.org/atom/language-clojure)
Adds syntax highlighting to Clojure files in Atom.
Originally [converted](http://flight-manual.atom.io/hacking-atom/sections/converting-from-textmate)
from the [Clojure TextMate bundle](https://github.com/mmcgrana/textmate-clojure).
Contributions are greatly appreciated. Please fork this repository and open a pull request to add snippets, make grammar tweaks, etc.

View File

@ -0,0 +1,37 @@
{
"max_line_length": {
"level": "ignore"
},
"no_empty_param_list": {
"level": "error"
},
"arrow_spacing": {
"level": "error"
},
"no_interpolation_in_single_quotes": {
"level": "error"
},
"no_debugger": {
"level": "error"
},
"prefer_english_operator": {
"level": "error"
},
"colon_assignment_spacing": {
"spacing": {
"left": 0,
"right": 1
},
"level": "error"
},
"braces_spacing": {
"spaces": 0,
"level": "error"
},
"spacing_after_comma": {
"level": "error"
},
"no_stand_alone_at": {
"level": "error"
}
}

View File

@ -0,0 +1,398 @@
'scopeName': 'source.clojure'
'fileTypes': [
'boot'
'clj'
'clj.hl'
'cljc'
'cljs'
'cljs.hl'
'cljx'
'clojure'
'edn'
'org'
'joke'
'joker'
]
'foldingStartMarker': '\\(\\s*$'
'foldingStopMarker': '^\\s*\\)'
'firstLineMatch': '''(?x)
# Hashbang
^\\#!.*(?:\\s|\\/)
boot
(?:$|\\s)
|
# Modeline
(?i:
# Emacs
-\\*-(?:\\s*(?=[^:;\\s]+\\s*-\\*-)|(?:.*?[;\\s]|(?<=-\\*-))mode\\s*:\\s*)
clojure(script)?
(?=[\\s;]|(?<![-*])-\\*-).*?-\\*-
|
# Vim
(?:(?:\\s|^)vi(?:m[<=>]?\\d+|m)?|\\sex)(?=:(?=\\s*set?\\s[^\\n:]+:)|:(?!\\s*set?\\s))(?:(?:\\s|\\s*:\\s*)\\w*(?:\\s*=(?:[^\\n\\\\\\s]|\\\\.)*)?)*[\\s:](?:filetype|ft|syntax)\\s*=
clojure
(?=\\s|:|$)
)
'''
'name': 'Clojure'
'patterns': [
{
'include': '#comment'
}
{
'include': '#shebang-comment'
}
{
'include': '#quoted-sexp'
}
{
'include': '#sexp'
}
{
'include': '#keyfn'
}
{
'include': '#string'
}
{
'include': '#vector'
}
{
'include': '#set'
}
{
'include': '#map'
}
{
'include': '#regexp'
}
{
'include': '#var'
}
{
'include': '#constants'
}
{
'include': '#dynamic-variables'
}
{
'include': '#metadata'
}
{
'include': '#namespace-symbol'
}
{
'include': '#symbol'
}
]
'repository':
'comment':
# NOTE: This must be kept as a begin/end match for language-todo to work
'begin': '(?<!\\\\);'
'beginCaptures':
'0':
'name': 'punctuation.definition.comment.clojure'
'end': '$'
'name': 'comment.line.semicolon.clojure'
'constants':
'patterns': [
{
'match': '(nil)(?=(\\s|\\)|\\]|\\}))'
'name': 'constant.language.nil.clojure'
}
{
'match': '(true|false)'
'name': 'constant.language.boolean.clojure'
}
{
'match': '(##(?:Inf|-Inf|NaN))'
'name': 'constant.numeric.symbol.clojure'
}
{
'match': '([-+]?\\d+/\\d+)'
'name': 'constant.numeric.ratio.clojure'
}
{
# Only Radixes between 2 and 36 are allowed
'match': '([-+]?(?:(?:3[0-6])|(?:[12]\\d)|[2-9])[rR][0-9A-Za-z]+N?)'
'name': 'constant.numeric.arbitrary-radix.clojure'
}
{
'match': '([-+]?0[xX][0-9a-fA-F]+N?)'
'name': 'constant.numeric.hexadecimal.clojure'
}
{
'match': '([-+]?0[0-7]+N?)'
'name': 'constant.numeric.octal.clojure'
}
{
# The decimal separator is optional only when followed by e, E or M!
'match': '([-+]?[0-9]+(?:(\\.|(?=[eEM]))[0-9]*([eE][-+]?[0-9]+)?)M?)'
'name': 'constant.numeric.double.clojure'
}
{
'match': '([-+]?\\d+N?)'
'name': 'constant.numeric.long.clojure'
}
{ # separating the pattern for reuse
'include': '#keyword'
}
]
'keyword':
'match': '(?<=(\\s|\\(|\\[|\\{)):[\\w\\#\\.\\-\\_\\:\\+\\=\\>\\<\\/\\!\\?\\*]+(?=(\\s|\\)|\\]|\\}|\\,))'
'name': 'constant.keyword.clojure'
'keyfn':
'patterns': [
{
'match': '(?<=(\\s|\\(|\\[|\\{))(if(-[-\\p{Ll}\\?]*)?|when(-[-\\p{Ll}]*)?|for(-[-\\p{Ll}]*)?|cond|do|let(-[-\\p{Ll}\\?]*)?|binding|loop|recur|fn|throw[\\p{Ll}\\-]*|try|catch|finally|([\\p{Ll}]*case))(?=(\\s|\\)|\\]|\\}))'
'name': 'storage.control.clojure'
}
{
'match': '(?<=(\\s|\\(|\\[|\\{))(declare-?|(in-)?ns|import|use|require|load|compile|(def[\\p{Ll}\\-]*))(?=(\\s|\\)|\\]|\\}))'
'name': 'keyword.control.clojure'
}
]
'dynamic-variables':
'match': '\\*[\\w\\.\\-\\_\\:\\+\\=\\>\\<\\!\\?\\d]+\\*'
'name': 'meta.symbol.dynamic.clojure'
'map':
'begin': '(\\{)'
'beginCaptures':
'1':
'name': 'punctuation.section.map.begin.clojure'
'end': '(\\}(?=[\\}\\]\\)\\s]*(?:;|$)))|(\\})'
'endCaptures':
'1':
'name': 'punctuation.section.map.end.trailing.clojure'
'2':
'name': 'punctuation.section.map.end.clojure'
'name': 'meta.map.clojure'
'patterns': [
{
'include': '$self'
}
]
'metadata':
'patterns': [
{
'begin': '(\\^\\{)'
'beginCaptures':
'1':
'name': 'punctuation.section.metadata.map.begin.clojure'
'end': '(\\}(?=[\\}\\]\\)\\s]*(?:;|$)))|(\\})'
'endCaptures':
'1':
'name': 'punctuation.section.metadata.map.end.trailing.clojure'
'2':
'name': 'punctuation.section.metadata.map.end.clojure'
'name': 'meta.metadata.map.clojure'
'patterns': [
{
'include': '$self'
}
]
}
{
'begin': '(\\^)'
'end': '(\\s)'
'name': 'meta.metadata.simple.clojure'
'patterns': [
{
'include': '#keyword'
}
{
'include': '$self'
}
]
}
]
'quoted-sexp':
'begin': '([\'``]\\()'
'beginCaptures':
'1':
'name': 'punctuation.section.expression.begin.clojure'
'end': '(\\))$|(\\)(?=[\\}\\]\\)\\s]*(?:;|$)))|(\\))'
'endCaptures':
'1':
'name': 'punctuation.section.expression.end.trailing.clojure'
'2':
'name': 'punctuation.section.expression.end.trailing.clojure'
'3':
'name': 'punctuation.section.expression.end.clojure'
'name': 'meta.quoted-expression.clojure'
'patterns': [
{
'include': '$self'
}
]
'regexp':
'begin': '#"'
'beginCaptures':
'0':
'name': 'punctuation.definition.regexp.begin.clojure'
'end': '"'
'endCaptures':
'0':
'name': 'punctuation.definition.regexp.end.clojure'
'name': 'string.regexp.clojure'
'patterns': [
{
'include': '#regexp_escaped_char'
}
]
'regexp_escaped_char':
'match': '\\\\.'
'name': 'constant.character.escape.clojure'
'set':
'begin': '(\\#\\{)'
'beginCaptures':
'1':
'name': 'punctuation.section.set.begin.clojure'
'end': '(\\}(?=[\\}\\]\\)\\s]*(?:;|$)))|(\\})'
'endCaptures':
'1':
'name': 'punctuation.section.set.end.trailing.clojure'
'2':
'name': 'punctuation.section.set.end.clojure'
'name': 'meta.set.clojure'
'patterns': [
{
'include': '$self'
}
]
'sexp':
'begin': '(\\()'
'beginCaptures':
'1':
'name': 'punctuation.section.expression.begin.clojure'
'end': '(\\))$|(\\)(?=[\\}\\]\\)\\s]*(?:;|$)))|(\\))'
'endCaptures':
'1':
'name': 'punctuation.section.expression.end.trailing.clojure'
'2':
'name': 'punctuation.section.expression.end.trailing.clojure'
'3':
'name': 'punctuation.section.expression.end.clojure'
'name': 'meta.expression.clojure'
'patterns': [
{
# ns, declare and everything that starts with def* or namespace/def*
'begin': '(?<=\\()(ns|declare|def[\\w\\d._:+=><!?*-]*|[\\w._:+=><!?*-][\\w\\d._:+=><!?*-]*/def[\\w\\d._:+=><!?*-]*)\\s+'
'beginCaptures':
'1':
'name': 'keyword.control.clojure'
'end': '(?=\\))'
'name': 'meta.definition.global.clojure'
'patterns': [
{
# there may be some metadata before an actual definition
'include': '#metadata'
}
{ # dynamic variables are rendered diferently
'include': '#dynamic-variables'
}
{
# recognizing a symbol as being defined here
# copied and pasted from #symbol, screw it
'match': '([\\p{L}\\.\\-\\_\\+\\=\\>\\<\\!\\?\\*][\\w\\.\\-\\_\\:\\+\\=\\>\\<\\!\\?\\*\\d]*)'
'name': 'entity.global.clojure'
}
{
'include': '$self'
}
]
}
{
'include': '#keyfn'
}
{
'include': '#constants'
}
{
'include': '#vector'
}
{
'include': '#map'
}
{
'include': '#set'
}
{
'include': '#sexp'
}
{
'match': '(?<=\\()(.+?)(?=\\s|\\))'
'captures':
'1':
'name': 'entity.name.function.clojure'
'patterns': [
{
'include': '$self'
}
]
}
{
'include': '$self'
}
]
'shebang-comment':
# NOTE: This must be kept as a begin/end match for language-todo to work
'begin': '^(#!)'
'beginCaptures':
'1':
'name': 'punctuation.definition.comment.shebang.clojure'
'end': '$'
'name': 'comment.line.shebang.clojure'
'string':
'begin': '(?<!\\\\)(")'
'beginCaptures':
'1':
'name': 'punctuation.definition.string.begin.clojure'
'end': '(")'
'endCaptures':
'1':
'name': 'punctuation.definition.string.end.clojure'
'name': 'string.quoted.double.clojure'
'patterns': [
{
'match': '\\\\.'
'name': 'constant.character.escape.clojure'
}
]
'namespace-symbol':
'patterns': [
{ # copied from #symbol, plus a / at the end. Matches the "app/" part of
# "app/*config*"
'match': '([\\p{L}\\.\\-\\_\\+\\=\\>\\<\\!\\?\\*][\\w\\.\\-\\_\\:\\+\\=\\>\\<\\!\\?\\*\\d]*)/'
'captures':
'1':
'name': 'meta.symbol.namespace.clojure'
}
]
'symbol':
'patterns': [
{
'match': '([\\p{L}\\.\\-\\_\\+\\=\\>\\<\\!\\?\\*][\\w\\.\\-\\_\\:\\+\\=\\>\\<\\!\\?\\*\\d]*)'
'name': 'meta.symbol.clojure'
}
]
'var':
'match': '(?<=(\\s|\\(|\\[|\\{)\\#)\'[\\w\\.\\-\\_\\:\\+\\=\\>\\<\\/\\!\\?\\*]+(?=(\\s|\\)|\\]|\\}))'
'name': 'meta.var.clojure'
'vector':
'begin': '(\\[)'
'beginCaptures':
'1':
'name': 'punctuation.section.vector.begin.clojure'
'end': '(\\](?=[\\}\\]\\)\\s]*(?:;|$)))|(\\])'
'endCaptures':
'1':
'name': 'punctuation.section.vector.end.trailing.clojure'
'2':
'name': 'punctuation.section.vector.end.clojure'
'name': 'meta.vector.clojure'
'patterns': [
{
'include': '$self'
}
]

View File

@ -0,0 +1,21 @@
{
"name": "language-clojure",
"version": "0.22.8",
"description": "Clojure language support in Atom",
"engines": {
"atom": "*",
"node": "*"
},
"homepage": "http://atom.github.io/language-clojure",
"repository": {
"type": "git",
"url": "https://github.com/atom/language-clojure"
},
"license": "MIT",
"bugs": {
"url": "https://github.com/atom/language-clojure/issues"
},
"devDependencies": {
"coffeelint": "^1.10.1"
}
}

View File

@ -0,0 +1,5 @@
'.source.clojure':
'editor':
'commentStart': '; '
'autocomplete':
'extraWordCharacters': '-'

View File

@ -0,0 +1,111 @@
'.source.clojure':
'ns':
'prefix': 'ns'
'body': """
(ns ${1:name}
(:require [${2:libraries}]))
$0
"""
'def':
'prefix': 'def'
'body': '(def ${1:symbol} ${2:value})'
'defn':
'prefix': 'defn'
'body': """
(defn ${1:name}
[${2:params}]
${3:body})
"""
'fn':
'prefix': 'fn'
'body': """
(fn [${1:params}]
${2:body})$0
"""
'let':
'prefix': 'let'
'body': """
(let [${1:bindings}]
${2:body})
"""
'if':
'prefix': 'if'
'body': """
(if ${1:test}
${2:then}
${3:else})
"""
'if-let':
'prefix': 'ifl'
'body': """
(if-let [${1:bindings}]
${2:then}
${3:else})
"""
'if-not':
'prefix': 'ifn'
'body': """
(if-not ${1:test}
${2:then}
${3:else})
"""
'when':
'prefix': 'when'
'body': """
(when ${1:test}
${2:body})
"""
'when-let':
'prefix': 'whenl'
'body': """
(when-let [${1:bindings}]
${2:body})
"""
'when-not':
'prefix': 'whenn'
'body': """
(when-not ${1:test}
${2:body})
"""
'map':
'prefix': 'map'
'body': '(map $1 $2)'
'map lambda':
'prefix': 'mapl'
'body': '(map #($1) $2)'
'condp':
'prefix': 'condp'
'body': """
(condp ${1:pred} ${2:expr}
$0)
"""
'try':
'prefix': 'try'
'body': """
(try
$1
(catch ${2:exception} e
$3))
"""
'prn':
'prefix': 'prn'
'body': '(prn $1)'
'println':
'prefix': 'prnl'
'body': '(println $1)'

View File

@ -0,0 +1,391 @@
describe "Clojure grammar", ->
grammar = null
beforeEach ->
waitsForPromise ->
atom.packages.activatePackage("language-clojure")
runs ->
grammar = atom.grammars.grammarForScopeName("source.clojure")
it "parses the grammar", ->
expect(grammar).toBeDefined()
expect(grammar.scopeName).toBe "source.clojure"
it "tokenizes semicolon comments", ->
{tokens} = grammar.tokenizeLine "; clojure"
expect(tokens[0]).toEqual value: ";", scopes: ["source.clojure", "comment.line.semicolon.clojure", "punctuation.definition.comment.clojure"]
expect(tokens[1]).toEqual value: " clojure", scopes: ["source.clojure", "comment.line.semicolon.clojure"]
it "does not tokenize escaped semicolons as comments", ->
{tokens} = grammar.tokenizeLine "\\; clojure"
expect(tokens[0]).toEqual value: "\\; ", scopes: ["source.clojure"]
expect(tokens[1]).toEqual value: "clojure", scopes: ["source.clojure", "meta.symbol.clojure"]
it "tokenizes shebang comments", ->
{tokens} = grammar.tokenizeLine "#!/usr/bin/env clojure"
expect(tokens[0]).toEqual value: "#!", scopes: ["source.clojure", "comment.line.shebang.clojure", "punctuation.definition.comment.shebang.clojure"]
expect(tokens[1]).toEqual value: "/usr/bin/env clojure", scopes: ["source.clojure", "comment.line.shebang.clojure"]
it "tokenizes strings", ->
{tokens} = grammar.tokenizeLine '"foo bar"'
expect(tokens[0]).toEqual value: '"', scopes: ["source.clojure", "string.quoted.double.clojure", "punctuation.definition.string.begin.clojure"]
expect(tokens[1]).toEqual value: 'foo bar', scopes: ["source.clojure", "string.quoted.double.clojure"]
expect(tokens[2]).toEqual value: '"', scopes: ["source.clojure", "string.quoted.double.clojure", "punctuation.definition.string.end.clojure"]
it "tokenizes character escape sequences", ->
{tokens} = grammar.tokenizeLine '"\\n"'
expect(tokens[0]).toEqual value: '"', scopes: ["source.clojure", "string.quoted.double.clojure", "punctuation.definition.string.begin.clojure"]
expect(tokens[1]).toEqual value: '\\n', scopes: ["source.clojure", "string.quoted.double.clojure", "constant.character.escape.clojure"]
expect(tokens[2]).toEqual value: '"', scopes: ["source.clojure", "string.quoted.double.clojure", "punctuation.definition.string.end.clojure"]
it "tokenizes regexes", ->
{tokens} = grammar.tokenizeLine '#"foo"'
expect(tokens[0]).toEqual value: '#"', scopes: ["source.clojure", "string.regexp.clojure", "punctuation.definition.regexp.begin.clojure"]
expect(tokens[1]).toEqual value: 'foo', scopes: ["source.clojure", "string.regexp.clojure"]
expect(tokens[2]).toEqual value: '"', scopes: ["source.clojure", "string.regexp.clojure", "punctuation.definition.regexp.end.clojure"]
it "tokenizes backslash escape character in regexes", ->
{tokens} = grammar.tokenizeLine '#"\\\\" "/"'
expect(tokens[0]).toEqual value: '#"', scopes: ["source.clojure", "string.regexp.clojure", "punctuation.definition.regexp.begin.clojure"]
expect(tokens[1]).toEqual value: "\\\\", scopes: ['source.clojure', 'string.regexp.clojure', 'constant.character.escape.clojure']
expect(tokens[2]).toEqual value: '"', scopes: ['source.clojure', 'string.regexp.clojure', "punctuation.definition.regexp.end.clojure"]
expect(tokens[4]).toEqual value: '"', scopes: ['source.clojure', 'string.quoted.double.clojure', 'punctuation.definition.string.begin.clojure']
expect(tokens[5]).toEqual value: "/", scopes: ['source.clojure', 'string.quoted.double.clojure']
expect(tokens[6]).toEqual value: '"', scopes: ['source.clojure', 'string.quoted.double.clojure', 'punctuation.definition.string.end.clojure']
it "tokenizes escaped double quote in regexes", ->
{tokens} = grammar.tokenizeLine '#"\\""'
expect(tokens[0]).toEqual value: '#"', scopes: ["source.clojure", "string.regexp.clojure", "punctuation.definition.regexp.begin.clojure"]
expect(tokens[1]).toEqual value: '\\"', scopes: ['source.clojure', 'string.regexp.clojure', 'constant.character.escape.clojure']
expect(tokens[2]).toEqual value: '"', scopes: ['source.clojure', 'string.regexp.clojure', "punctuation.definition.regexp.end.clojure"]
it "tokenizes numerics", ->
numbers =
"constant.numeric.ratio.clojure": ["1/2", "123/456", "+0/2", "-23/1"]
"constant.numeric.arbitrary-radix.clojure": ["2R1011", "16rDEADBEEF", "16rDEADBEEFN", "36rZebra"]
"constant.numeric.hexadecimal.clojure": ["0xDEADBEEF", "0XDEADBEEF", "0xDEADBEEFN", "0x0"]
"constant.numeric.octal.clojure": ["0123", "0123N", "00"]
"constant.numeric.double.clojure": ["123.45", "123.45e6", "123.45E6", "123.456M", "42.", "42.M", "42E+9M", "42E-0", "0M", "+0M", "42.E-23M"]
"constant.numeric.long.clojure": ["123", "12321", "123N", "+123N", "-123", "0"]
"constant.numeric.symbol.clojure": ["##Inf", "##-Inf", "##NaN"]
for scope, nums of numbers
for num in nums
{tokens} = grammar.tokenizeLine num
expect(tokens[0]).toEqual value: num, scopes: ["source.clojure", scope]
it "tokenizes booleans", ->
booleans =
"constant.language.boolean.clojure": ["true", "false"]
for scope, bools of booleans
for bool in bools
{tokens} = grammar.tokenizeLine bool
expect(tokens[0]).toEqual value: bool, scopes: ["source.clojure", scope]
it "tokenizes nil", ->
{tokens} = grammar.tokenizeLine "nil"
expect(tokens[0]).toEqual value: "nil", scopes: ["source.clojure", "constant.language.nil.clojure"]
it "tokenizes keywords", ->
tests =
"meta.expression.clojure": ["(:foo)"]
"meta.map.clojure": ["{:foo}"]
"meta.vector.clojure": ["[:foo]"]
"meta.quoted-expression.clojure": ["'(:foo)", "`(:foo)"]
for metaScope, lines of tests
for line in lines
{tokens} = grammar.tokenizeLine line
expect(tokens[1]).toEqual value: ":foo", scopes: ["source.clojure", metaScope, "constant.keyword.clojure"]
{tokens} = grammar.tokenizeLine "(def foo :bar)"
expect(tokens[5]).toEqual value: ":bar", scopes: ["source.clojure", "meta.expression.clojure", "meta.definition.global.clojure", "constant.keyword.clojure"]
# keywords can start with an uppercase non-ASCII letter
{tokens} = grammar.tokenizeLine "(def foo :Öπ)"
expect(tokens[5]).toEqual value: ":Öπ", scopes: ["source.clojure", "meta.expression.clojure", "meta.definition.global.clojure", "constant.keyword.clojure"]
it "tokenizes keyfns (keyword control)", ->
keyfns = ["declare", "declare-", "ns", "in-ns", "import", "use", "require", "load", "compile", "def", "defn", "defn-", "defmacro", "defåπç"]
for keyfn in keyfns
{tokens} = grammar.tokenizeLine "(#{keyfn})"
expect(tokens[1]).toEqual value: keyfn, scopes: ["source.clojure", "meta.expression.clojure", "keyword.control.clojure"]
it "tokenizes keyfns (storage control)", ->
keyfns = ["if", "when", "for", "cond", "do", "let", "binding", "loop", "recur", "fn", "throw", "try", "catch", "finally", "case"]
for keyfn in keyfns
{tokens} = grammar.tokenizeLine "(#{keyfn})"
expect(tokens[1]).toEqual value: keyfn, scopes: ["source.clojure", "meta.expression.clojure", "storage.control.clojure"]
it "tokenizes global definitions", ->
macros = ["ns", "declare", "def", "defn", "defn-", "defroutes", "compojure/defroutes", "rum.core/defc123-", "some.nested-ns/def-nested->symbol!?*", "def+!.?abc8:<>", "ns/def+!.?abc8:<>", "ns/defåÄÖπç"]
for macro in macros
{tokens} = grammar.tokenizeLine "(#{macro} foo 'bar)"
expect(tokens[1]).toEqual value: macro, scopes: ["source.clojure", "meta.expression.clojure", "meta.definition.global.clojure", "keyword.control.clojure"]
expect(tokens[3]).toEqual value: "foo", scopes: ["source.clojure", "meta.expression.clojure", "meta.definition.global.clojure", "entity.global.clojure"]
it "tokenizes dynamic variables", ->
mutables = ["*ns*", "*foo-bar*", "*åÄÖπç*"]
for mutable in mutables
{tokens} = grammar.tokenizeLine mutable
expect(tokens[0]).toEqual value: mutable, scopes: ["source.clojure", "meta.symbol.dynamic.clojure"]
it "tokenizes metadata", ->
{tokens} = grammar.tokenizeLine "^Foo"
expect(tokens[0]).toEqual value: "^", scopes: ["source.clojure", "meta.metadata.simple.clojure"]
expect(tokens[1]).toEqual value: "Foo", scopes: ["source.clojure", "meta.metadata.simple.clojure", "meta.symbol.clojure"]
# non-ASCII letters
{tokens} = grammar.tokenizeLine "^Öπ"
expect(tokens[0]).toEqual value: "^", scopes: ["source.clojure", "meta.metadata.simple.clojure"]
expect(tokens[1]).toEqual value: "Öπ", scopes: ["source.clojure", "meta.metadata.simple.clojure", "meta.symbol.clojure"]
{tokens} = grammar.tokenizeLine "^{:foo true}"
expect(tokens[0]).toEqual value: "^{", scopes: ["source.clojure", "meta.metadata.map.clojure", "punctuation.section.metadata.map.begin.clojure"]
expect(tokens[1]).toEqual value: ":foo", scopes: ["source.clojure", "meta.metadata.map.clojure", "constant.keyword.clojure"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.clojure", "meta.metadata.map.clojure"]
expect(tokens[3]).toEqual value: "true", scopes: ["source.clojure", "meta.metadata.map.clojure", "constant.language.boolean.clojure"]
expect(tokens[4]).toEqual value: "}", scopes: ["source.clojure", "meta.metadata.map.clojure", "punctuation.section.metadata.map.end.trailing.clojure"]
it "tokenizes functions", ->
expressions = ["(foo)", "(foo 1 10)"]
for expr in expressions
{tokens} = grammar.tokenizeLine expr
expect(tokens[1]).toEqual value: "foo", scopes: ["source.clojure", "meta.expression.clojure", "entity.name.function.clojure"]
#non-ASCII letters
{tokens} = grammar.tokenizeLine "(Öπ 2 20)"
expect(tokens[1]).toEqual value: "Öπ", scopes: ["source.clojure", "meta.expression.clojure", "entity.name.function.clojure"]
it "tokenizes vars", ->
{tokens} = grammar.tokenizeLine "(func #'foo)"
expect(tokens[2]).toEqual value: " #", scopes: ["source.clojure", "meta.expression.clojure"]
expect(tokens[3]).toEqual value: "'foo", scopes: ["source.clojure", "meta.expression.clojure", "meta.var.clojure"]
# non-ASCII letters
{tokens} = grammar.tokenizeLine "(func #'Öπ)"
expect(tokens[2]).toEqual value: " #", scopes: ["source.clojure", "meta.expression.clojure"]
expect(tokens[3]).toEqual value: "'Öπ", scopes: ["source.clojure", "meta.expression.clojure", "meta.var.clojure"]
it "tokenizes symbols", ->
{tokens} = grammar.tokenizeLine "x"
expect(tokens[0]).toEqual value: "x", scopes: ["source.clojure", "meta.symbol.clojure"]
# non-ASCII letters
{tokens} = grammar.tokenizeLine "Öπ"
expect(tokens[0]).toEqual value: "Öπ", scopes: ["source.clojure", "meta.symbol.clojure"]
# Should not be tokenized as a symbol
{tokens} = grammar.tokenizeLine "1foobar"
expect(tokens[0]).toEqual value: "1", scopes: ["source.clojure", "constant.numeric.long.clojure"]
it "tokenizes namespaces", ->
{tokens} = grammar.tokenizeLine "foo/bar"
expect(tokens[0]).toEqual value: "foo", scopes: ["source.clojure", "meta.symbol.namespace.clojure"]
expect(tokens[1]).toEqual value: "/", scopes: ["source.clojure"]
expect(tokens[2]).toEqual value: "bar", scopes: ["source.clojure", "meta.symbol.clojure"]
# non-ASCII letters
{tokens} = grammar.tokenizeLine "Öπ/Åä"
expect(tokens[0]).toEqual value: "Öπ", scopes: ["source.clojure", "meta.symbol.namespace.clojure"]
expect(tokens[1]).toEqual value: "/", scopes: ["source.clojure"]
expect(tokens[2]).toEqual value: "Åä", scopes: ["source.clojure", "meta.symbol.clojure"]
testMetaSection = (metaScope, puncScope, startsWith, endsWith) ->
# Entire expression on one line.
{tokens} = grammar.tokenizeLine "#{startsWith}foo, bar#{endsWith}"
[start, mid..., end] = tokens
expect(start).toEqual value: startsWith, scopes: ["source.clojure", "meta.#{metaScope}.clojure", "punctuation.section.#{puncScope}.begin.clojure"]
expect(end).toEqual value: endsWith, scopes: ["source.clojure", "meta.#{metaScope}.clojure", "punctuation.section.#{puncScope}.end.trailing.clojure"]
for token in mid
expect(token.scopes.slice(0, 2)).toEqual ["source.clojure", "meta.#{metaScope}.clojure"]
# Expression broken over multiple lines.
tokens = grammar.tokenizeLines("#{startsWith}foo\n bar#{endsWith}")
[start, mid..., after] = tokens[0]
expect(start).toEqual value: startsWith, scopes: ["source.clojure", "meta.#{metaScope}.clojure", "punctuation.section.#{puncScope}.begin.clojure"]
for token in mid
expect(token.scopes.slice(0, 2)).toEqual ["source.clojure", "meta.#{metaScope}.clojure"]
[mid..., end] = tokens[1]
expect(end).toEqual value: endsWith, scopes: ["source.clojure", "meta.#{metaScope}.clojure", "punctuation.section.#{puncScope}.end.trailing.clojure"]
for token in mid
expect(token.scopes.slice(0, 2)).toEqual ["source.clojure", "meta.#{metaScope}.clojure"]
it "tokenizes expressions", ->
testMetaSection "expression", "expression", "(", ")"
it "tokenizes quoted expressions", ->
testMetaSection "quoted-expression", "expression", "'(", ")"
testMetaSection "quoted-expression", "expression", "`(", ")"
it "tokenizes vectors", ->
testMetaSection "vector", "vector", "[", "]"
it "tokenizes maps", ->
testMetaSection "map", "map", "{", "}"
it "tokenizes sets", ->
testMetaSection "set", "set", "\#{", "}"
it "tokenizes functions in nested sexp", ->
{tokens} = grammar.tokenizeLine "((foo bar) baz)"
expect(tokens[0]).toEqual value: "(", scopes: ["source.clojure", "meta.expression.clojure", "punctuation.section.expression.begin.clojure"]
expect(tokens[1]).toEqual value: "(", scopes: ["source.clojure", "meta.expression.clojure", "meta.expression.clojure", "punctuation.section.expression.begin.clojure"]
expect(tokens[2]).toEqual value: "foo", scopes: ["source.clojure", "meta.expression.clojure", "meta.expression.clojure", "entity.name.function.clojure"]
expect(tokens[3]).toEqual value: " ", scopes: ["source.clojure", "meta.expression.clojure", "meta.expression.clojure"]
expect(tokens[4]).toEqual value: "bar", scopes: ["source.clojure", "meta.expression.clojure", "meta.expression.clojure", "meta.symbol.clojure"]
expect(tokens[5]).toEqual value: ")", scopes: ["source.clojure", "meta.expression.clojure", "meta.expression.clojure", "punctuation.section.expression.end.clojure"]
expect(tokens[6]).toEqual value: " ", scopes: ["source.clojure", "meta.expression.clojure"]
expect(tokens[7]).toEqual value: "baz", scopes: ["source.clojure", "meta.expression.clojure", "meta.symbol.clojure"]
expect(tokens[8]).toEqual value: ")", scopes: ["source.clojure", "meta.expression.clojure", "punctuation.section.expression.end.trailing.clojure"]
it "tokenizes maps used as functions", ->
{tokens} = grammar.tokenizeLine "({:foo bar} :foo)"
expect(tokens[0]).toEqual value: "(", scopes: ["source.clojure", "meta.expression.clojure", "punctuation.section.expression.begin.clojure"]
expect(tokens[1]).toEqual value: "{", scopes: ["source.clojure", "meta.expression.clojure", "meta.map.clojure", "punctuation.section.map.begin.clojure"]
expect(tokens[2]).toEqual value: ":foo", scopes: ["source.clojure", "meta.expression.clojure", "meta.map.clojure", "constant.keyword.clojure"]
expect(tokens[3]).toEqual value: " ", scopes: ["source.clojure", "meta.expression.clojure", "meta.map.clojure"]
expect(tokens[4]).toEqual value: "bar", scopes: ["source.clojure", "meta.expression.clojure", "meta.map.clojure", "meta.symbol.clojure"]
expect(tokens[5]).toEqual value: "}", scopes: ["source.clojure", "meta.expression.clojure", "meta.map.clojure", "punctuation.section.map.end.clojure"]
expect(tokens[6]).toEqual value: " ", scopes: ["source.clojure", "meta.expression.clojure"]
expect(tokens[7]).toEqual value: ":foo", scopes: ["source.clojure", "meta.expression.clojure", "constant.keyword.clojure"]
expect(tokens[8]).toEqual value: ")", scopes: ["source.clojure", "meta.expression.clojure", "punctuation.section.expression.end.trailing.clojure"]
it "tokenizes sets used in functions", ->
{tokens} = grammar.tokenizeLine "(\#{:foo :bar})"
expect(tokens[0]).toEqual value: "(", scopes: ["source.clojure", "meta.expression.clojure", "punctuation.section.expression.begin.clojure"]
expect(tokens[1]).toEqual value: "\#{", scopes: ["source.clojure", "meta.expression.clojure", "meta.set.clojure", "punctuation.section.set.begin.clojure"]
expect(tokens[2]).toEqual value: ":foo", scopes: ["source.clojure", "meta.expression.clojure", "meta.set.clojure", "constant.keyword.clojure"]
expect(tokens[3]).toEqual value: " ", scopes: ["source.clojure", "meta.expression.clojure", "meta.set.clojure"]
expect(tokens[4]).toEqual value: ":bar", scopes: ["source.clojure", "meta.expression.clojure", "meta.set.clojure", "constant.keyword.clojure"]
expect(tokens[5]).toEqual value: "}", scopes: ["source.clojure", "meta.expression.clojure", "meta.set.clojure", "punctuation.section.set.end.trailing.clojure"]
expect(tokens[6]).toEqual value: ")", scopes: ["source.clojure", "meta.expression.clojure", "punctuation.section.expression.end.trailing.clojure"]
describe "firstLineMatch", ->
it "recognises interpreter directives", ->
valid = """
#!/usr/sbin/boot foo
#!/usr/bin/boot foo=bar/
#!/usr/sbin/boot
#!/usr/sbin/boot foo bar baz
#!/usr/bin/boot perl
#!/usr/bin/boot bin/perl
#!/usr/bin/boot
#!/bin/boot
#!/usr/bin/boot --script=usr/bin
#! /usr/bin/env A=003 B=149 C=150 D=xzd E=base64 F=tar G=gz H=head I=tail boot
#!\t/usr/bin/env --foo=bar boot --quu=quux
#! /usr/bin/boot
#!/usr/bin/env boot
"""
for line in valid.split /\n/
expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull()
invalid = """
\x20#!/usr/sbin/boot
\t#!/usr/sbin/boot
#!/usr/bin/env-boot/node-env/
#!/usr/bin/das-boot
#! /usr/binboot
#!\t/usr/bin/env --boot=bar
"""
for line in invalid.split /\n/
expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull()
it "recognises Emacs modelines", ->
valid = """
#-*- Clojure -*-
#-*- mode: ClojureScript -*-
/* -*-clojureScript-*- */
// -*- Clojure -*-
/* -*- mode:Clojure -*- */
// -*- font:bar;mode:Clojure -*-
// -*- font:bar;mode:Clojure;foo:bar; -*-
// -*-font:mode;mode:Clojure-*-
// -*- foo:bar mode: clojureSCRIPT bar:baz -*-
" -*-foo:bar;mode:clojure;bar:foo-*- ";
" -*-font-mode:foo;mode:clojure;foo-bar:quux-*-"
"-*-font:x;foo:bar; mode : clojure; bar:foo;foooooo:baaaaar;fo:ba;-*-";
"-*- font:x;foo : bar ; mode : ClojureScript ; bar : foo ; foooooo:baaaaar;fo:ba-*-";
"""
for line in valid.split /\n/
expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull()
invalid = """
/* --*clojure-*- */
/* -*-- clojure -*-
/* -*- -- Clojure -*-
/* -*- Clojure -;- -*-
// -*- iClojure -*-
// -*- Clojure; -*-
// -*- clojure-door -*-
/* -*- model:clojure -*-
/* -*- indent-mode:clojure -*-
// -*- font:mode;Clojure -*-
// -*- mode: -*- Clojure
// -*- mode: das-clojure -*-
// -*-font:mode;mode:clojure--*-
"""
for line in invalid.split /\n/
expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull()
it "recognises Vim modelines", ->
valid = """
vim: se filetype=clojure:
# vim: se ft=clojure:
# vim: set ft=Clojure:
# vim: set filetype=Clojure:
# vim: ft=Clojure
# vim: syntax=Clojure
# vim: se syntax=Clojure:
# ex: syntax=Clojure
# vim:ft=clojure
# vim600: ft=clojure
# vim>600: set ft=clojure:
# vi:noai:sw=3 ts=6 ft=clojure
# vi::::::::::noai:::::::::::: ft=clojure
# vim:ts=4:sts=4:sw=4:noexpandtab:ft=clojure
# vi:: noai : : : : sw =3 ts =6 ft =clojure
# vim: ts=4: pi sts=4: ft=clojure: noexpandtab: sw=4:
# vim: ts=4 sts=4: ft=clojure noexpandtab:
# vim:noexpandtab sts=4 ft=clojure ts=4
# vim:noexpandtab:ft=clojure
# vim:ts=4:sts=4 ft=clojure:noexpandtab:\x20
# vim:noexpandtab titlestring=hi\|there\\\\ ft=clojure ts=4
"""
for line in valid.split /\n/
expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull()
invalid = """
ex: se filetype=clojure:
_vi: se filetype=clojure:
vi: se filetype=clojure
# vim set ft=klojure
# vim: soft=clojure
# vim: clean-syntax=clojure:
# vim set ft=clojure:
# vim: setft=clojure:
# vim: se ft=clojure backupdir=tmp
# vim: set ft=clojure set cmdheight=1
# vim:noexpandtab sts:4 ft:clojure ts:4
# vim:noexpandtab titlestring=hi\\|there\\ ft=clojure ts=4
# vim:noexpandtab titlestring=hi\\|there\\\\\\ ft=clojure ts=4
"""
for line in invalid.split /\n/
expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull()

View File

@ -0,0 +1,15 @@
# Configuration for probot-no-response - https://github.com/probot/no-response
# Number of days of inactivity before an issue is closed for lack of response
daysUntilClose: 28
# Label requiring a response
responseRequiredLabel: more-information-needed
# Comment to post when closing an issue for lack of response. Set to `false` to disable.
closeComment: >
This issue has been automatically closed because there has been no response
to our request for more information from the original author. With only the
information that is currently in the issue, we don't have enough information
to take action. Please reach out if you have or find the answers we need so
that we can investigate further.

View File

@ -0,0 +1,23 @@
name: CI
on: [push]
env:
CI: true
jobs:
Test:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
channel: [stable, beta]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v1
- uses: UziTech/action-setup-atom@v2
with:
version: ${{ matrix.channel }}
- name: Install dependencies
run: apm install
- name: Run tests
run: atom --test spec

View File

@ -0,0 +1,2 @@
*.cache
node_modules

View File

@ -0,0 +1 @@
See the [Atom contributing guide](https://github.com/atom/atom/blob/master/CONTRIBUTING.md)

View File

@ -0,0 +1,40 @@
<!--
Have you read Atom's Code of Conduct? By filing an Issue, you are expected to comply with it, including treating everyone with respect: https://github.com/atom/atom/blob/master/CODE_OF_CONDUCT.md
Do you want to ask a question? Are you looking for support? The Atom message board is the best place for getting support: https://discuss.atom.io
-->
### Prerequisites
* [ ] Put an X between the brackets on this line if you have done all of the following:
* Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode
* Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/
* Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq
* Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom
* Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages
### Description
[Description of the issue]
### Steps to Reproduce
1. [First Step]
2. [Second Step]
3. [and so on...]
**Expected behavior:** [What you expect to happen]
**Actual behavior:** [What actually happens]
**Reproduces how often:** [What percentage of the time does it reproduce?]
### Versions
You can get this information from copy and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running.
### Additional Information
Any additional information, configuration or data that might be necessary to reproduce the issue.

View File

@ -0,0 +1,49 @@
Copyright (c) 2014 GitHub Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------
This package was derived from a TextMate bundle located at
https://github.com/jashkenas/coffee-script-tmbundle and distributed under the
following license, located in `LICENSE`:
Copyright (c) 2009-2014 Jeremy Ashkenas
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,28 @@
### Requirements
* Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion.
* All new code requires tests to ensure against regressions
### Description of the Change
<!--
We must be able to understand the design of your change from this description. If we can't get a good idea of what the code will be doing from the description here, the pull request may be closed at the maintainers' discretion. Keep in mind that the maintainer reviewing this PR may not be familiar with or have worked with the code here recently, so please walk us through the concepts.
-->
### Alternate Designs
<!-- Explain what other alternates were considered and why the proposed version was selected -->
### Benefits
<!-- What benefits will be realized by the code change? -->
### Possible Drawbacks
<!-- What are the possible side-effects or negative impacts of the code change? -->
### Applicable Issues
<!-- Enter any applicable Issues here -->

View File

@ -0,0 +1,9 @@
# CoffeeScript language support in Atom
[![macOS Build Status](https://travis-ci.org/atom/language-coffee-script.svg?branch=master)](https://travis-ci.org/atom/language-coffee-script)
[![Windows Build status](https://ci.appveyor.com/api/projects/status/4j9aak7iwn2f2x7a/branch/master?svg=true)](https://ci.appveyor.com/project/Atom/language-coffee-script/branch/master) [![Dependency Status](https://david-dm.org/atom/language-coffee-script.svg)](https://david-dm.org/atom/language-coffee-script)
Adds syntax highlighting and snippets to CoffeeScript files in Atom.
Originally [converted](http://flight-manual.atom.io/hacking-atom/sections/converting-from-textmate) from the [CoffeeScript TextMate bundle](https://github.com/jashkenas/coffee-script-tmbundle).
Contributions are greatly appreciated. Please fork this repository and open a pull request to add snippets, make grammar tweaks, etc.

View File

@ -0,0 +1,37 @@
{
"max_line_length": {
"level": "ignore"
},
"no_empty_param_list": {
"level": "error"
},
"arrow_spacing": {
"level": "error"
},
"no_interpolation_in_single_quotes": {
"level": "error"
},
"no_debugger": {
"level": "error"
},
"prefer_english_operator": {
"level": "error"
},
"colon_assignment_spacing": {
"spacing": {
"left": 0,
"right": 1
},
"level": "error"
},
"braces_spacing": {
"spaces": 0,
"level": "error"
},
"spacing_after_comma": {
"level": "error"
},
"no_stand_alone_at": {
"level": "error"
}
}

View File

@ -0,0 +1,724 @@
'comment': 'CoffeeScript (Literate)'
'fileTypes': [
'litcoffee'
'litcoffee.erb'
'coffee.md'
]
'name': 'CoffeeScript (Literate)'
'scopeName': 'source.litcoffee'
'firstLineMatch': '''(?x)
# Hashbang
^\\#!.*(?:\\s|\\/)
coffee(?:\\s.+?)?\\s(?:-l|--literate)
(?:\\s|$)
|
# Modeline
(?i:
# Emacs
-\\*-(?:\\s*(?=[^:;\\s]+\\s*-\\*-)|(?:.*?[;\\s]|(?<=-\\*-))mode\\s*:\\s*)
litcoffee
(?=[\\s;]|(?<![-*])-\\*-).*?-\\*-
|
# Vim
(?:(?:\\s|^)vi(?:m[<=>]?\\d+|m)?|\\sex)(?=:(?=\\s*set?\\s[^\\n:]+:)|:(?!\\s*set?\\s))(?:(?:\\s|\\s*:\\s*)\\w*(?:\\s*=(?:[^\\n\\\\\\s]|\\\\.)*)?)*[\\s:](?:filetype|ft|syntax)\\s*=
litcoffee
(?=\\s|:|$)
)
'''
'patterns': [
{
'begin': '^(?=([ ]{4}|\\t)(?!$))'
'end': '^(?!([ ]{4}|\\t))'
'name': 'markup.raw.block.markdown'
'patterns': [
{
'include': '#block_raw'
}
]
}
{
'begin': '''
(?x)^
(?= [ ]{0,3}>.
| [#]{1,6}\\s*+
| [ ]{0,3}(?<marker>[-*_])([ ]{0,2}\\k<marker>){2,}[ \\t]*+$
)
'''
'comment': 'We could also use an empty end match and set applyEndPatternLast, but then we must be sure that the begin pattern will only match stuff matched by the sub-patterns.'
'end': '''
(?x)^
(?! [ ]{0,3}>.
| [#]{1,6}\\s*+
| [ ]{0,3}(?<marker>[-*_])([ ]{0,2}\\k<marker>){2,}[ \\t]*+$
)
'''
'name': 'meta.block-level.markdown'
'patterns': [
{
'include': '#block_quote'
}
{
'include': '#heading'
}
{
'include': '#separator'
}
]
}
{
'begin': '^[ ]{0,3}([*+-])(?=\\s)'
'captures':
'1':
'name': 'punctuation.definition.list_item.markdown'
'end': '^(?=\\S|[ ]{4,})|(?!\\G)'
'name': 'markup.list.unnumbered.markdown'
'patterns': [
{
'include': '#list-paragraph'
}
]
}
{
'begin': '^[ ]{0,3}([0-9]+\\.)(?=\\s)'
'captures':
'1':
'name': 'punctuation.definition.list_item.markdown'
'end': '^(?=\\S|[ ]{4,})|(?!\\G)'
'name': 'markup.list.numbered.markdown'
'patterns': [
{
'include': '#list-paragraph'
}
]
}
{
'begin': '^(?=<(p|div|h[1-6]|blockquote|pre|table|dl|ol|ul|script|noscript|form|fieldset|iframe|math|ins|del)\\b)(?!.*?</\\1>)'
'comment': 'Markdown formatting is disabled inside block-level tags.'
'end': '(?<=^</\\1>$\\n)'
'name': 'meta.disable-markdown'
'patterns': [
{
'include': 'text.html.basic'
}
]
}
{
'begin': '^(?=<(p|div|h[1-6]|blockquote|pre|table|dl|ol|ul|script|noscript|form|fieldset|iframe|math|ins|del)\\b)'
'comment': 'Same rule but for one line disables.'
'end': '$\\n?'
'name': 'meta.disable-markdown'
'patterns': [
{
'include': 'text.html.basic'
}
]
}
{
'captures':
'1':
'name': 'punctuation.definition.constant.markdown'
'2':
'name': 'constant.other.reference.link.markdown'
'3':
'name': 'punctuation.definition.constant.markdown'
'4':
'name': 'punctuation.separator.key-value.markdown'
'5':
'name': 'punctuation.definition.link.markdown'
'6':
'name': 'markup.underline.link.markdown'
'7':
'name': 'punctuation.definition.link.markdown'
'8':
'name': 'string.other.link.description.title.markdown'
'9':
'name': 'punctuation.definition.string.begin.markdown'
'10':
'name': 'punctuation.definition.string.end.markdown'
'11':
'name': 'string.other.link.description.title.markdown'
'12':
'name': 'punctuation.definition.string.begin.markdown'
'13':
'name': 'punctuation.definition.string.end.markdown'
'match': '''
(?x)
\\s* # Leading whitespace
(\\[)(.+?)(\\])(:) # Reference name
[ \\t]* # Optional whitespace
(<?)(\\S+?)(>?) # The url
[ \\t]* # Optional whitespace
(?:
((\\().+?(\\))) # Match title in quotes…
| ((").+?(")) # or in parens.
)? # Title is optional
\\s* # Optional whitespace
$
'''
'name': 'meta.link.reference.def.markdown'
}
{
'begin': '^(?=\\S)(?![=-]{3,}(?=$))'
'end': '^(?:\\s*$|(?=[ ]{0,3}>.))|(?=[ \\t]*\\n)(?<=^===|^====|=====|^---|^----|-----)[ \\t]*\\n|(?=^#)'
'name': 'meta.paragraph.markdown'
'patterns': [
{
'include': '#inline'
}
{
'include': 'text.html.basic'
}
{
'captures':
'1':
'name': 'punctuation.definition.heading.markdown'
'match': '^(={3,})(?=[ \\t]*$)'
'name': 'markup.heading.1.markdown'
}
{
'captures':
'1':
'name': 'punctuation.definition.heading.markdown'
'match': '^(-{3,})(?=[ \\t]*$)'
'name': 'markup.heading.2.markdown'
}
]
}
]
'repository':
'ampersand':
'comment': 'Markdown will convert this for us. We match it so that the HTML grammar will not mark it up as invalid.'
'match': '&(?!([a-zA-Z0-9]+|#[0-9]+|#x[0-9a-fA-F]+);)'
'name': 'meta.other.valid-ampersand.markdown'
'block_quote':
'begin': '\\G[ ]{0,3}(>)(?!$)[ ]?'
'beginCaptures':
'1':
'name': 'punctuation.definition.blockquote.markdown'
'comment': ' We terminate the block quote when seeing an empty line, a separator or a line with leading > characters. The latter is to “reset” the quote level for quoted lines.'
'end': '''
(?x)^
(?= \\s*$
| [ ]{0,3}(?<marker>[-*_])([ ]{0,2}\\k<marker>){2,}[ \\t]*+$
| [ ]{0,3}>.
)
'''
'name': 'markup.quote.markdown'
'patterns': [
{
'begin': '''
(?x)\\G
(?= [ ]{0,3}>.
)
'''
'end': '^'
'patterns': [
{
'include': '#block_quote'
}
]
}
{
'applyEndPatternLast': 1
'begin': '''
(?x)\\G
(?= ([ ]{4}|\\t)
| [#]{1,6}\\s*+
| [ ]{0,3}(?<marker>[-*_])([ ]{0,2}\\k<marker>){2,}[ \\t]*+$
)
'''
'end': '^'
'patterns': [
{
'include': '#block_raw'
}
{
'include': '#heading'
}
{
'include': '#separator'
}
]
}
{
'begin': '''
(?x)\\G
(?! $
| [ ]{0,3}>.
| ([ ]{4}|\\t)
| [#]{1,6}\\s*+
| [ ]{0,3}(?<marker>[-*_])([ ]{0,2}\\k<marker>){2,}[ \\t]*+$
)
'''
'end': '$|(?<=\\n)'
'patterns': [
{
'include': '#inline'
}
]
}
]
'block_raw':
'name': 'markup.raw.block.markdown'
'patterns': [
{
'include': '#coffee_script'
}
]
'bold':
'begin': '''
(?x)
(\\*\\*|__)(?=\\S) # Open
(?=
(
<[^>]*+> # HTML tags
| (?<raw>`+)([^`]|(?!(?<!`)\\k<raw>(?!`))`)*+\\k<raw> # Raw
| \\\\[\\\\`*_{}\\[\\]()#.!+\\->]?+ # Escapes
| \\[
(
(?<square> # Named group
[^\\[\\]\\\\] # Match most chars
| \\\\. # Escaped chars
| \\[ \\g<square>*+ \\] # Nested brackets
)*+
\\]
(
( # Reference Link
[ ]? # Optional space
\\[[^\\]]*+\\] # Ref name
)
|
( # Inline Link
\\( # Opening paren
[ \\t]*+ # Optional whitespace
<?(.*?)>? # URL
[ \\t]*+ # Optional whitespace
( # Optional Title
(?<title>[\'"])
(.*?)
\\k<title>
)?
\\)
)
)
)
| (?!(?<=\\S)\\1). # Everything besides
)++
(?<=\\S)\\1 # Close
)
'''
'captures':
'1':
'name': 'punctuation.definition.bold.markdown'
'end': '(?<=\\S)(\\1)'
'name': 'markup.bold.markdown'
'patterns': [
{
'applyEndPatternLast': 1
'begin': '(?=<[^>]*?>)'
'end': '(?<=>)'
'patterns': [
{
'include': 'text.html.basic'
}
]
}
{
'include': '#escape'
}
{
'include': '#ampersand'
}
{
'include': '#bracket'
}
{
'include': '#raw'
}
{
'include': '#italic'
}
{
'include': '#image-inline'
}
{
'include': '#link-inline'
}
{
'include': '#link-inet'
}
{
'include': '#link-email'
}
{
'include': '#image-ref'
}
{
'include': '#link-ref-literal'
}
{
'include': '#link-ref'
}
]
'bracket':
'comment': 'Markdown will convert this for us. We match it so that the HTML grammar will not mark it up as invalid.'
'match': '<(?![a-z/?\\$!])'
'name': 'meta.other.valid-bracket.markdown'
'coffee_script':
'patterns': [
{
'include': 'source.coffee'
}
]
'escape':
'match': '\\\\[-`*_#+.!(){}\\[\\]\\\\>]'
'name': 'constant.character.escape.markdown'
'heading':
'begin': '\\G(#{1,6})(?!#)\\s*(?=\\S)'
'captures':
'1':
'name': 'punctuation.definition.heading.markdown'
'contentName': 'entity.name.section.markdown'
'end': '\\s*(#*)$\\n?'
'name': 'markup.heading.markdown'
'patterns': [
{
'include': '#inline'
}
]
'image-inline':
'captures':
'1':
'name': 'punctuation.definition.string.begin.markdown'
'2':
'name': 'string.other.link.description.markdown'
'3':
'name': 'punctuation.definition.string.end.markdown'
'5':
'name': 'invalid.illegal.whitespace.markdown'
'6':
'name': 'punctuation.definition.metadata.markdown'
'7':
'name': 'punctuation.definition.link.markdown'
'8':
'name': 'markup.underline.link.image.markdown'
'9':
'name': 'punctuation.definition.link.markdown'
'10':
'name': 'string.other.link.description.title.markdown'
'11':
'name': 'punctuation.definition.string.markdown'
'12':
'name': 'punctuation.definition.string.markdown'
'13':
'name': 'string.other.link.description.title.markdown'
'14':
'name': 'punctuation.definition.string.markdown'
'15':
'name': 'punctuation.definition.string.markdown'
'16':
'name': 'punctuation.definition.metadata.markdown'
'match': '''
(?x)
\\! # Images start with !
(\\[)((?<square>[^\\[\\]\\\\]|\\\\.|\\[\\g<square>*+\\])*+)(\\]) # Match the link text
([ ])? # Space not allowed
(\\() # Opening paren for url
(<?)(\\S+?)(>?) # The url
[ \\t]* # Optional whitespace
(?:
((\\().+?(\\))) # Match title in parens…
| ((").+?(")) # or in quotes.
)? # Title is optional
\\s* # Optional whitespace
(\\))
'''
'name': 'meta.image.inline.markdown'
'image-ref':
'captures':
'1':
'name': 'punctuation.definition.string.begin.markdown'
'2':
'name': 'string.other.link.description.markdown'
'4':
'name': 'punctuation.definition.string.begin.markdown'
'5':
'name': 'punctuation.definition.constant.markdown'
'6':
'name': 'constant.other.reference.link.markdown'
'7':
'name': 'punctuation.definition.constant.markdown'
'match': '\\!(\\[)((?<square>[^\\[\\]\\\\]|\\\\.|\\[\\g<square>*+\\])*+)(\\])[ ]?(\\[)(.*?)(\\])'
'name': 'meta.image.reference.markdown'
'inline':
'patterns': [
{
'include': '#escape'
}
{
'include': '#ampersand'
}
{
'include': '#bracket'
}
{
'include': '#raw'
}
{
'include': '#bold'
}
{
'include': '#italic'
}
{
'include': '#line-break'
}
{
'include': '#image-inline'
}
{
'include': '#link-inline'
}
{
'include': '#link-inet'
}
{
'include': '#link-email'
}
{
'include': '#image-ref'
}
{
'include': '#link-ref-literal'
}
{
'include': '#link-ref'
}
]
'italic':
'begin': '''
(?x)
(\\*|_)(?=\\S) # Open
(?=
(
<[^>]*+> # HTML tags
| (?<raw>`+)([^`]|(?!(?<!`)\\k<raw>(?!`))`)*+\\k<raw> # Raw
| \\\\[\\\\`*_{}\\[\\]()#.!+\\->]?+ # Escapes
| \\[
(
(?<square> # Named group
[^\\[\\]\\\\] # Match most chars
| \\\\. # Escaped chars
| \\[ \\g<square>*+ \\] # Nested brackets
)*+
\\]
(
( # Reference Link
[ ]? # Optional space
\\[[^\\]]*+\\] # Ref name
)
|
( # Inline Link
\\( # Opening paren
[ \\t]*+ # Optional whitespace
<?(.*?)>? # URL
[ \\t]*+ # Optional whitespace
( # Optional Title
(?<title>[\'"])
(.*?)
\\k<title>
)?
\\)
)
)
)
| \\1\\1 # Must be bold closer
| (?!(?<=\\S)\\1). # Everything besides
)++
(?<=\\S)\\1 # Close
)
'''
'captures':
'1':
'name': 'punctuation.definition.italic.markdown'
'end': '(?<=\\S)(\\1)((?!\\1)|(?=\\1\\1))'
'name': 'markup.italic.markdown'
'patterns': [
{
'applyEndPatternLast': 1
'begin': '(?=<[^>]*?>)'
'end': '(?<=>)'
'patterns': [
{
'include': 'text.html.basic'
}
]
}
{
'include': '#escape'
}
{
'include': '#ampersand'
}
{
'include': '#bracket'
}
{
'include': '#raw'
}
{
'include': '#bold'
}
{
'include': '#image-inline'
}
{
'include': '#link-inline'
}
{
'include': '#link-inet'
}
{
'include': '#link-email'
}
{
'include': '#image-ref'
}
{
'include': '#link-ref-literal'
}
{
'include': '#link-ref'
}
]
'line-break':
'match': ' {2,}$'
'name': 'meta.dummy.line-break'
'link-email':
'captures':
'1':
'name': 'punctuation.definition.link.markdown'
'2':
'name': 'markup.underline.link.markdown'
'4':
'name': 'punctuation.definition.link.markdown'
'match': '(<)((?:mailto:)?[-.\\w]+@[-a-z0-9]+(\\.[-a-z0-9]+)*\\.[a-z]+)(>)'
'name': 'meta.link.email.lt-gt.markdown'
'link-inet':
'captures':
'1':
'name': 'punctuation.definition.link.markdown'
'2':
'name': 'markup.underline.link.markdown'
'3':
'name': 'punctuation.definition.link.markdown'
'match': '(<)((?:https?|ftp)://.*?)(>)'
'name': 'meta.link.inet.markdown'
'link-inline':
'captures':
'1':
'name': 'punctuation.definition.string.begin.markdown'
'2':
'name': 'string.other.link.title.markdown'
'4':
'name': 'punctuation.definition.string.end.markdown'
'5':
'name': 'invalid.illegal.whitespace.markdown'
'6':
'name': 'punctuation.definition.metadata.markdown'
'7':
'name': 'punctuation.definition.link.markdown'
'8':
'name': 'markup.underline.link.markdown'
'9':
'name': 'punctuation.definition.link.markdown'
'10':
'name': 'string.other.link.description.title.markdown'
'11':
'name': 'punctuation.definition.string.begin.markdown'
'12':
'name': 'punctuation.definition.string.end.markdown'
'13':
'name': 'string.other.link.description.title.markdown'
'14':
'name': 'punctuation.definition.string.begin.markdown'
'15':
'name': 'punctuation.definition.string.end.markdown'
'16':
'name': 'punctuation.definition.metadata.markdown'
'match': '''
(?x)
(\\[)((?<square>[^\\[\\]\\\\]|\\\\.|\\[\\g<square>*+\\])*+)(\\]) # Match the link text.
([ ])? # Space not allowed
(\\() # Opening paren for url
(<?)(.*?)(>?) # The url
[ \\t]* # Optional whitespace
(?:
((\\().+?(\\))) # Match title in parens…
| ((").+?(")) # or in quotes.
)? # Title is optional
\\s* # Optional whitespace
(\\))
'''
'name': 'meta.link.inline.markdown'
'link-ref':
'captures':
'1':
'name': 'punctuation.definition.string.begin.markdown'
'2':
'name': 'string.other.link.title.markdown'
'4':
'name': 'punctuation.definition.string.end.markdown'
'5':
'name': 'punctuation.definition.constant.begin.markdown'
'6':
'name': 'constant.other.reference.link.markdown'
'7':
'name': 'punctuation.definition.constant.end.markdown'
'match': '(\\[)((?<square>[^\\[\\]\\\\]|\\\\.|\\[\\g<square>*+\\])*+)(\\])[ ]?(\\[)([^\\]]*+)(\\])'
'name': 'meta.link.reference.markdown'
'link-ref-literal':
'captures':
'1':
'name': 'punctuation.definition.string.begin.markdown'
'2':
'name': 'string.other.link.title.markdown'
'4':
'name': 'punctuation.definition.string.end.markdown'
'5':
'name': 'punctuation.definition.constant.begin.markdown'
'6':
'name': 'punctuation.definition.constant.end.markdown'
'match': '(\\[)((?<square>[^\\[\\]\\\\]|\\\\.|\\[\\g<square>*+\\])*+)(\\])[ ]?(\\[)(\\])'
'name': 'meta.link.reference.literal.markdown'
'list-paragraph':
'patterns': [
{
'begin': '\\G\\s+(?=\\S)'
'end': '^\\s*$'
'name': 'meta.paragraph.list.markdown'
'patterns': [
{
'include': '#inline'
}
{
'captures':
'1':
'name': 'punctuation.definition.list_item.markdown'
'comment': 'Match the list punctuation'
'match': '^\\s*([*+-]|[0-9]+\\.)'
}
]
}
]
'raw':
'captures':
'1':
'name': 'punctuation.definition.raw.markdown'
'3':
'name': 'punctuation.definition.raw.markdown'
'match': '(`+)([^`]|(?!(?<!`)\\1(?!`))`)*+(\\1)'
'name': 'markup.raw.inline.markdown'
'separator':
'match': '\\G[ ]{0,3}([-*_])([ ]{0,2}\\1){2,}[ \\t]*$\\n?'
'name': 'meta.separator.markdown'

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,156 @@
{
"name": "language-coffee-script",
"version": "0.50.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"balanced-match": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=",
"dev": true
},
"brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dev": true,
"requires": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"coffee-script": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.11.1.tgz",
"integrity": "sha1-vxxHrWREOg2V0S3ysUfMCk2q1uk=",
"dev": true
},
"coffeelint": {
"version": "1.16.2",
"resolved": "https://registry.npmjs.org/coffeelint/-/coffeelint-1.16.2.tgz",
"integrity": "sha512-6mzgOo4zb17WfdrSui/cSUEgQ0AQkW3gXDht+6lHkfkqGUtSYKwGdGcXsDfAyuScVzTlTtKdfwkAlJWfqul7zg==",
"dev": true,
"requires": {
"coffee-script": "~1.11.0",
"glob": "^7.0.6",
"ignore": "^3.0.9",
"optimist": "^0.6.1",
"resolve": "^0.6.3",
"strip-json-comments": "^1.0.2"
}
},
"concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
"dev": true
},
"fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=",
"dev": true
},
"glob": {
"version": "7.1.3",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz",
"integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==",
"dev": true,
"requires": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.0.4",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
}
},
"ignore": {
"version": "3.3.10",
"resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz",
"integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==",
"dev": true
},
"inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
"dev": true,
"requires": {
"once": "^1.3.0",
"wrappy": "1"
}
},
"inherits": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
"integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=",
"dev": true
},
"minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"dev": true,
"requires": {
"brace-expansion": "^1.1.7"
}
},
"minimist": {
"version": "0.0.10",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz",
"integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=",
"dev": true
},
"once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
"dev": true,
"requires": {
"wrappy": "1"
}
},
"optimist": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz",
"integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=",
"dev": true,
"requires": {
"minimist": "~0.0.1",
"wordwrap": "~0.0.2"
}
},
"path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=",
"dev": true
},
"resolve": {
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-0.6.3.tgz",
"integrity": "sha1-3ZV5gufnNt699TtYpN2RdUV13UY=",
"dev": true
},
"strip-json-comments": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-1.0.4.tgz",
"integrity": "sha1-HhX7ysl9Pumb8tc7TGVrCCu6+5E=",
"dev": true
},
"wordwrap": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz",
"integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=",
"dev": true
},
"wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=",
"dev": true
}
}
}

View File

@ -0,0 +1,21 @@
{
"version": "0.50.0",
"name": "language-coffee-script",
"description": "CoffeeScript language support in Atom",
"license": "MIT",
"engines": {
"atom": "*",
"node": "*"
},
"homepage": "http://atom.github.io/language-coffee-script",
"repository": {
"type": "git",
"url": "https://github.com/atom/language-coffee-script.git"
},
"bugs": {
"url": "https://github.com/atom/language-coffee-script/issues"
},
"devDependencies": {
"coffeelint": "^1.10.1"
}
}

View File

@ -0,0 +1,19 @@
'.source.coffee, .source.litcoffee, .source.coffee.md':
'editor':
'commentStart': '# '
'.source.coffee':
'editor':
'autoIndentOnPaste': false
'increaseIndentPattern': '(?x)
^\\s*
(
.*\\b(?<!\\.)class(\\s|$)
| [a-zA-Z\\$_](\\w|\\$|:|\\.)*\\s*(?=\\:(\\s*\\(.*\\))?\\s*((=|-)>\\s*$))
| [a-zA-Z\\$_](\\w|\\$|\\.)*\\s*(:|=)\\s*((if|while)(?!.*?then)|for|$)
| \\b(if|else|unless|while|when)\\b(?!.*?then)|\\b(for|loop)\\b
| \\b(try|finally|catch|((catch|switch)\\s+\\S.*))\\b\\s*$
| .*[-=]>\\s*$
| .*[\\{\\[]\\s*$
| .*:\\s*$
)'
'decreaseIndentPattern': '^\\s*(\\}|\\]|\\b(else|catch|finally)\\b)$'

View File

@ -0,0 +1,89 @@
'.source.coffee':
'Array Comprehension':
'prefix': 'fora'
'body': 'for ${1:name} in ${2:array}\n ${0:# body...}'
'Function (bound)':
'prefix': 'bfun'
'body': '(${1:args}) =>\n ${0:# body...}'
'Class':
'prefix': 'cla'
'body': 'class ${1:ClassName}${2: extends ${3:Ancestor}}\n\n ${4:constructor: (${5:args}) ->\n ${6:# body...}}\n $7'
'Else if':
'prefix': 'elif'
'body': 'else if ${1:condition}\n ${0:# body...}'
'Function':
'prefix': 'fun'
'body': '(${1:args}) ->\n ${0:# body...}\n\n'
'If .. Else':
'prefix': 'ife'
'body': 'if ${1:condition}\n ${2:# body...}\nelse\n ${3:# body...}'
'If':
'prefix': 'if'
'body': 'if ${1:condition}\n ${0:# body...}'
'Object comprehension':
'prefix': 'foro'
'body': 'for ${1:key}, ${2:value} of ${3:Object}\n ${0:# body...}'
'Range comprehension (exclusive)':
'prefix': 'forrex'
'body': 'for ${1:name} in [${2:start}...${3:finish}]${4: by ${5:step}}\n ${0:# body...}'
'Range comprehension (inclusive)':
'prefix': 'forr'
'body': 'for ${1:name} in [${2:start}..${3:finish}]${4: by ${5:step}}\n ${0:# body...}'
'Switch':
'prefix': 'swi'
'body': 'switch ${1:object}\n when ${2:value}\n ${0:# body...}'
'Ternary If':
'prefix': 'ifte'
'body': 'if ${1:condition} then ${2:value} else ${3:other}'
'Try .. Catch':
'prefix': 'try'
'body': 'try\n $1\ncatch ${2:error}\n $3'
'Unless':
'prefix': 'unl'
'body': '${1:action} unless ${2:condition}'
'Subheader':
'prefix': '/3'
'body': '# $1\n# -------------------------\n$0'
'log':
'prefix': 'log'
'body': 'console.log $0'
'warn':
'prefix': 'warn'
'body': 'console.warn $0'
'error':
'prefix': 'error'
'body': 'console.error $0'
'require':
'prefix': 'req'
'body': '${1:sys} $3= require \'${2:${1:sys}}\'$4'
'Describe block':
'prefix': 'de',
'body': 'describe "${1:description}", ->\n ${2:body}'
'It block':
'prefix': 'i',
'body': 'it "$1", ->\n $2'
'Before each':
'prefix': 'be',
'body': 'beforeEach ->\n $1'
'After each':
'prefix': 'af',
'body': 'afterEach ->\n $1'
'Expectation':
'prefix': 'ex',
'body': 'expect($1).to$2'
'Range array':
'prefix': 'ra',
'body': '[[$1, $2], [$3, $4]]'
'Point array':
'prefix': 'pt',
'body': '[$1, $2]'
'Key-value pair':
'prefix': 'kv',
'body': '${1:\'${2:key}\'}: ${3:value}'
'Create Jasmine spy':
'prefix': 'spy',
'body': 'jasmine.createSpy(\'${1:description}\')$2'
'.string.quoted.double.coffee:not(.string .source), .string.quoted.double.heredoc.coffee:not(.string .source)':
'Interpolated Code':
'prefix': '#'
'body': '#{$1}$2'

View File

@ -0,0 +1,122 @@
# Specs for the Literate CoffeeScript grammar shipped by the
# language-coffee-script package. Covers: grammar registration, tokenization
# of an indented code block that follows a Markdown list, and the
# firstLineMatch heuristics (interpreter shebangs, Emacs modelines, and Vim
# modelines) that pick the grammar for extensionless files.
describe "CoffeeScript (Literate) grammar", ->
  grammar = null

  beforeEach ->
    waitsForPromise ->
      atom.packages.activatePackage("language-coffee-script")

    runs ->
      grammar = atom.grammars.grammarForScopeName("source.litcoffee")

  it "parses the grammar", ->
    expect(grammar).toBeTruthy()
    expect(grammar.scopeName).toBe "source.litcoffee"

  it "recognizes a code block after a list", ->
    # NOTE(review): tokens[3] addresses line index 3, which implies a blank
    # separator line between the list and the indented code line in the
    # fixture below — confirm against the upstream spec if this fails.
    tokens = grammar.tokenizeLines '''
      1. Example
      2. List

          1 + 2
    '''
    expect(tokens[3][1]).toEqual value: "1", scopes: ["source.litcoffee", "markup.raw.block.markdown", "constant.numeric.decimal.coffee"]

  describe "firstLineMatch", ->
    it "recognises interpreter directives", ->
      # Shebang lines that should select the grammar.
      valid = """
        #!/usr/local/bin/coffee --no-header --literate -w
        #!/usr/local/bin/coffee -l
        #!/usr/local/bin/env coffee --literate -w
      """
      for line in valid.split /\n/
        expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull()

      # Near-miss shebangs that must not select it.
      invalid = """
        #!/usr/local/bin/coffee --no-head -literate -w
        #!/usr/local/bin/coffee --wl
        #!/usr/local/bin/env coffee --illiterate -w=l
      """
      for line in invalid.split /\n/
        expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull()

    it "recognises Emacs modelines", ->
      # -*- ... -*- modelines in assorted comment styles and cases.
      valid = """
        #-*- litcoffee -*-
        #-*- mode: litcoffee -*-
        /* -*-litcoffee-*- */
        // -*- litcoffee -*-
        /* -*- mode:LITCOFFEE -*- */
        // -*- font:bar;mode:LitCoffee -*-
        // -*- font:bar;mode:litcoffee;foo:bar; -*-
        // -*-font:mode;mode:litcoffee-*-
        // -*- foo:bar mode: litcoffee bar:baz -*-
        " -*-foo:bar;mode:litcoffee;bar:foo-*- ";
        " -*-font-mode:foo;mode:LITcofFEE;foo-bar:quux-*-"
        "-*-font:x;foo:bar; mode : litCOFFEE; bar:foo;foooooo:baaaaar;fo:ba;-*-";
        "-*- font:x;foo : bar ; mode : LiTcOFFEe ; bar : foo ; foooooo:baaaaar;fo:ba-*-";
      """
      for line in valid.split /\n/
        expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull()

      # Malformed or differently-named modelines that must be rejected.
      invalid = """
        /* --*litcoffee-*- */
        /* -*-- litcoffee -*-
        /* -*- -- litcoffee -*-
        /* -*- LITCOFFEE -;- -*-
        // -*- itsLitCoffeeFam -*-
        // -*- litcoffee; -*-
        // -*- litcoffee-stuff -*-
        /* -*- model:litcoffee -*-
        /* -*- indent-mode:litcoffee -*-
        // -*- font:mode;litcoffee -*-
        // -*- mode: -*- litcoffee
        // -*- mode: burnt-because-litcoffee -*-
        // -*-font:mode;mode:litcoffee--*-
      """
      for line in invalid.split /\n/
        expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull()

    it "recognises Vim modelines", ->
      # vim:/vi:/ex: modelines with ft=/filetype=/syntax= in many spellings.
      valid = """
        vim: se filetype=litcoffee:
        # vim: se ft=litcoffee:
        # vim: set ft=LITCOFFEE:
        # vim: set filetype=litcoffee:
        # vim: ft=LITCOFFEE
        # vim: syntax=litcoffee
        # vim: se syntax=litcoffee:
        # ex: syntax=litcoffee
        # vim:ft=LitCoffee
        # vim600: ft=litcoffee
        # vim>600: set ft=litcoffee:
        # vi:noai:sw=3 ts=6 ft=litcoffee
        # vi::::::::::noai:::::::::::: ft=litcoffee
        # vim:ts=4:sts=4:sw=4:noexpandtab:ft=LITCOFFEE
        # vi:: noai : : : : sw =3 ts =6 ft =litCoffee
        # vim: ts=4: pi sts=4: ft=litcoffee: noexpandtab: sw=4:
        # vim: ts=4 sts=4: ft=litcoffee noexpandtab:
        # vim:noexpandtab sts=4 ft=LitCOffEE ts=4
        # vim:noexpandtab:ft=litcoffee
        # vim:ts=4:sts=4 ft=litcoffee:noexpandtab:\x20
        # vim:noexpandtab titlestring=hi\|there\\\\ ft=litcoffee ts=4
      """
      for line in valid.split /\n/
        expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).not.toBeNull()

      # Lookalikes (wrong prefix, wrong option name, bad escaping) that must
      # not select the grammar.
      invalid = """
        ex: se filetype=litcoffee:
        _vi: se filetype=litcoffee:
        vi: se filetype=litcoffee
        # vim set ft=illitcoffee
        # vim: soft=litcoffee
        # vim: clean-syntax=litcoffee:
        # vim set ft=litcoffee:
        # vim: setft=litcoffee:
        # vim: se ft=litcoffee backupdir=tmp
        # vim: set ft=LITCOFFEE set cmdheight=1
        # vim:noexpandtab sts:4 ft:litcoffee ts:4
        # vim:noexpandtab titlestring=hi\\|there\\ ft=litcoffee ts=4
        # vim:noexpandtab titlestring=hi\\|there\\\\\\ ft=litcoffee ts=4
      """
      for line in invalid.split /\n/
        expect(grammar.firstLineRegex.scanner.findNextMatchSync(line)).toBeNull()

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1 @@
spec/fixtures

View File

@ -0,0 +1,15 @@
# Configuration for probot-no-response - https://github.com/probot/no-response
# Number of days of inactivity before an issue is closed for lack of response
daysUntilClose: 28
# Label requiring a response
responseRequiredLabel: more-information-needed
# Comment to post when closing an issue for lack of response. Set to `false` to disable.
closeComment: >
This issue has been automatically closed because there has been no response
to our request for more information from the original author. With only the
information that is currently in the issue, we don't have enough information
to take action. Please reach out if you have or find the answers we need so
that we can investigate further.

View File

@ -0,0 +1,23 @@
name: CI
on: [push]
env:
CI: true
jobs:
Test:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
channel: [stable, beta]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v1
- uses: UziTech/action-setup-atom@v2
with:
version: ${{ matrix.channel }}
- name: Install dependencies
run: apm install
- name: Run tests
run: atom --test spec

1
packages/language-csharp/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
node_modules

View File

@ -0,0 +1,40 @@
<!--
Have you read Atom's Code of Conduct? By filing an Issue, you are expected to comply with it, including treating everyone with respect: https://github.com/atom/atom/blob/master/CODE_OF_CONDUCT.md
Do you want to ask a question? Are you looking for support? The Atom message board is the best place for getting support: https://discuss.atom.io
-->
### Prerequisites
* [ ] Put an X between the brackets on this line if you have done all of the following:
* Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode
* Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/
* Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq
* Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom
* Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages
### Description
[Description of the issue]
### Steps to Reproduce
1. [First Step]
2. [Second Step]
3. [and so on...]
**Expected behavior:** [What you expect to happen]
**Actual behavior:** [What actually happens]
**Reproduces how often:** [What percentage of the time does it reproduce?]
### Versions
You can get this information by copying and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running.
### Additional Information
Any additional information, configuration or data that might be necessary to reproduce the issue.

View File

@ -0,0 +1,26 @@
MIT License
Copyright (c) 2016 .NET Foundation, GitHub Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------
This package uses the
https://github.com/dotnet/csharp-tmLanguage from the .NET Foundation

View File

@ -0,0 +1,28 @@
### Requirements
* Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion.
* All new code requires tests to ensure against regressions
### Description of the Change
<!--
We must be able to understand the design of your change from this description. If we can't get a good idea of what the code will be doing from the description here, the pull request may be closed at the maintainers' discretion. Keep in mind that the maintainer reviewing this PR may not be familiar with or have worked with the code here recently, so please walk us through the concepts.
-->
### Alternate Designs
<!-- Explain what other alternates were considered and why the proposed version was selected -->
### Benefits
<!-- What benefits will be realized by the code change? -->
### Possible Drawbacks
<!-- What are the possible side-effects or negative impacts of the code change? -->
### Applicable Issues
<!-- Enter any applicable Issues here -->

View File

@ -0,0 +1,9 @@
# C# language support in Atom
[![macOS Build Status](https://travis-ci.org/atom/language-csharp.svg?branch=master)](https://travis-ci.org/atom/language-csharp)
[![Windows Build Status](https://ci.appveyor.com/api/projects/status/j1as3753y5t90obn/branch/master?svg=true)](https://ci.appveyor.com/project/Atom/language-csharp/branch/master) [![Dependency Status](https://david-dm.org/atom/language-csharp.svg)](https://david-dm.org/atom/language-csharp)
Adds syntax highlighting and snippets to C# files in Atom.
The C# grammar comes from the [.NET Foundation's C# grammar](https://github.com/dotnet/csharp-tmLanguage)
Contributions and issues with the grammar should be raised upstream.

View File

@ -0,0 +1,14 @@
# Grammar for Cake build scripts (.cake): delegates all highlighting to the
# C# grammar ("source.cs") and additionally marks Cake's #load / #l
# preprocessor directives at the start of a line.
scopeName: "source.cake"
name: "C# Cake File"
fileTypes: [
  "cake"
]
patterns: [
  {
    include: "source.cs"
  }
  {
    match: "^#(load|l)"
    name: "preprocessor.source.cake"
  }
]

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,14 @@
# Grammar for C# script files (.csx): delegates all highlighting to the
# C# grammar ("source.cs") and additionally marks the #load / #r
# preprocessor directives at the start of a line.
scopeName: "source.csx"
name: "C# Script File"
fileTypes: [
  "csx"
]
patterns: [
  {
    include: "source.cs"
  }
  {
    match: "^#(load|r)"
    name: "preprocessor.source.csx"
  }
]

View File

@ -0,0 +1,17 @@
{
"name": "language-csharp",
"version": "1.1.0",
"private": true,
"description": "C# language support for Atom",
"repository": "https://github.com/atom/language-csharp",
"keywords": [
"C#",
"csharp",
".Net"
],
"license": "MIT",
"engines": {
"atom": ">0.50.0"
},
"dependencies": {}
}

View File

@ -0,0 +1,19 @@
# Removes '-' characters from named groups to make
# Oniguruma expressions compatible with PCRE engine.
import re
def read(filename):
    """Return the entire contents of *filename* decoded as UTF-8 text."""
    with open(filename, 'rt', encoding='utf8') as source:
        return source.read()
def write(filename, content):
    """Overwrite *filename* with *content*, encoded as UTF-8 text."""
    with open(filename, 'w', encoding='utf8') as sink:
        sink.write(content)
def convert(string):
    """Strip '-' characters from Oniguruma named groups and their
    backreferences so the expressions become PCRE-compatible.

    Two passes over *string*:
      1. group declarations written as ``?<group-name>``;
      2. backreferences written as ``\\g<group-name>`` (the grammar file
         stores regexes with escaped backslashes, hence the double ``\\``).

    The replacement rewrites the whole match with dashes removed; only the
    name between ``<`` and ``>`` can contain a dash, so the surrounding
    delimiters are untouched.
    """
    result = re.sub(r'\?<([a-zA-Z-_]*)>', lambda x: x.group().replace('-', ''), string)
    # Bug fix: the reference pattern previously used [a-zA-Z-] (no '_'),
    # so a group named with both '_' and '-' was renamed at its declaration
    # while its \g<...> references kept the dash, leaving them dangling.
    # The class now accepts exactly the same names as the declaration pass.
    return re.sub(r'\\\\g<([a-zA-Z-_]*)>', lambda x: x.group().replace('-', ''), result)
# Rewrite the C# grammar file in place: load it, strip the dashes from
# named groups and backreferences, then save the result over the original.
content = read('../grammars/csharp.cson')
updated = convert(content)
write('../grammars/csharp.cson', updated)

View File

@ -0,0 +1,5 @@
'.source.cs':
'editor':
'commentStart': '// '
'increaseIndentPattern': '(?x)\n\t\t^ .* \\{ [^}"\']* $\n\t| ^ \\s* \\{ \\} $\n\t'
'decreaseIndentPattern': '(?x)\n\t\t^ (.*\\*/)? \\s* \\} ( [^}{"\']* \\{ | \\s* while \\s* \\( .* )? [;\\s]* (//.*|/\\*.*\\*/\\s*)? $\n\t'

View File

@ -0,0 +1,142 @@
'.source.cs':
'Abstract':
'prefix': 'ab'
'body': 'abstract '
'Array':
'prefix': 'arr'
'body': '${0:DataType}[] ${1:VariableName} = {};'
'Async Task':
'prefix': 'at'
'body': 'async Task<${0:T}> ${1:MethodName}($2) {\n\t$3\n}'
'Async Void':
'prefix': 'av'
'body': 'async void ${0:MethodName}($1) {\n\t$2\n}'
'Await':
'prefix': 'aw'
'body': 'await '
'Break':
'prefix': 'br'
'body': 'break;\n'
'Case':
'prefix': 'cs'
'body': 'case ${1:Condition}:\n\t$2\n$0'
'Catch':
'prefix': 'ca'
'body': 'catch (${1:Exception} ${2:e}) {\n\t$0\n}'
'Class':
'prefix': 'cl'
'body': 'class $1\n{\n\t$0\n}'
'Constant String':
'prefix': 'cos'
'body': 'public const string ${1:Var} = $2;$0'
'Constant':
'prefix': 'co'
'body': 'public const ${1:string} ${2:Var} = $3;$0'
'Default':
'prefix': 'de'
'body': 'default:\n\t$0'
'Do While':
'prefix': 'do'
'body': 'do {\n\t$0\n} while (${1:Condition});'
'Else If':
'prefix': 'elif'
'body': 'else if (${1:Condition}) {\n\t$0\n}'
'Else':
'prefix': 'el'
'body': 'else {\n\t$0\n}'
'Enumeration':
'prefix': 'enum'
'body': 'enum $1\n{\n\t$0\n}'
'Finally':
'prefix': 'fy'
'body': 'finally {\n\t$0\n}'
'Fixed':
'prefix': 'fi'
'body': 'fixed (${1:Expression}) {\n\t$0\n}'
'For':
'prefix': 'for'
'body': 'for (${1:Initializer}; ${2:Condition}; ${3:Update}) {\n\t$0\n}'
'For Each':
'prefix': 'fore'
'body': 'foreach (${1:Type} in ${2:Collection}) {\n\t$0\n}'
'If ':
'prefix': 'if'
'body': 'if (${1:Condition}) {\n\t$0\n}'
'Interface':
'prefix': 'in'
'body': 'interface $1\n{\n\t$0\n}'
'Method (Main)':
'prefix': 'main'
'body': '/// <summary>\n/// The main entry point for the application\n/// </summary>\n[STAThread]\npublic static void Main(string[] args)\n{\n\t$0\n}'
'Method':
'prefix': 'm'
'body': '${1:void} ${2:Method}($3)\n{\n\t$0\n}'
'Namespace ':
'prefix': 'ns'
'body': 'namespace ${1:NamespaceName}\n{\n\t$0\n}'
'Override':
'prefix': 'over'
'body': 'override '
'Parse':
'prefix': 'par'
'body': '${0:DataType}.Parse(${1:VariableName});'
'Private':
'prefix': 'pr'
'body': 'private '
'Property':
'prefix': 'prop'
'body': 'public ${1:string} ${2:PropertyName} { get; set; }'
'Protected':
'prefix': 'po'
'body': 'protected '
'Public ':
'prefix': 'pu'
'body': 'public '
'ReadLine':
'prefix': 'rl'
'body': 'Console.ReadLine();'
'Region':
'prefix': 'reg'
'body': '#region ${1:Region Name}\n\n$0\n\n#endregion\n'
'Return':
'prefix': 're'
'body': 'return '
'Sealed':
'prefix': 'se'
'body': 'sealed '
'Static':
'prefix': 'st'
'body': 'static '
'Struct':
'prefix': 'su'
'body': 'struct $1\n{\n\t$0\n}'
'Switch':
'prefix': 'sw'
'body': 'switch (${1:Expression}) {\n\t$0\n}'
'Throw New':
'prefix': 'tn'
'body': 'throw new $0'
'Throw':
'prefix': 'th'
'body': 'throw $0'
'Try':
'prefix': 'tr'
'body': 'try {\n\t$0\n}'
'Using':
'prefix': 'us'
'body': 'using ${1:System};$0'
'Variable':
'prefix': 'v'
'body': '${1:string} ${2:var}${3: = ${0:null}};'
'Virtual':
'prefix': 'virt'
'body': 'virtual '
'While':
'prefix': 'wh'
'body': 'while (${1:Condition}) {\n\t$0\n}'
'Write':
'prefix': 'w'
'body': 'Console.Write($1);$0'
'WriteLine':
'prefix': 'wl'
'body': 'Console.WriteLine($1);$0'

View File

@ -0,0 +1,17 @@
# Smoke tests for the language-csharp package: after activating the package,
# verify that the auxiliary C# Script (.csx) and Cake (.cake) grammars are
# registered under their expected scope names.
describe "Language C# package", ->
  beforeEach ->
    waitsForPromise ->
      atom.packages.activatePackage("language-csharp")

  describe "C# Script grammar", ->
    it "parses the grammar", ->
      grammar = atom.grammars.grammarForScopeName("source.csx")
      expect(grammar).toBeDefined()
      expect(grammar.scopeName).toBe "source.csx"

  describe "C# Cake grammar", ->
    it "parses the grammar", ->
      grammar = atom.grammars.grammarForScopeName("source.cake")
      expect(grammar).toBeDefined()
      expect(grammar.scopeName).toBe "source.cake"

View File

@ -0,0 +1,15 @@
# Configuration for probot-no-response - https://github.com/probot/no-response
# Number of days of inactivity before an issue is closed for lack of response
daysUntilClose: 28
# Label requiring a response
responseRequiredLabel: more-information-needed
# Comment to post when closing an issue for lack of response. Set to `false` to disable.
closeComment: >
This issue has been automatically closed because there has been no response
to our request for more information from the original author. With only the
information that is currently in the issue, we don't have enough information
to take action. Please reach out if you have or find the answers we need so
that we can investigate further.

View File

@ -0,0 +1,28 @@
name: CI
on: [push]
env:
CI: true
jobs:
Test:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
channel: [stable, beta]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v1
- uses: UziTech/action-setup-atom@v2
with:
version: ${{ matrix.channel }}
- name: Install windows-build-tools
if: ${{ matrix.os == 'windows-latest' }}
run: |
npm install node-gyp@latest
npm config set msvs_version 2019
- name: Install dependencies
run: npm i
- name: Run tests
run: atom --test spec

1
packages/language-css/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
node_modules

View File

@ -0,0 +1 @@
See the [Atom contributing guide](https://github.com/atom/atom/blob/master/CONTRIBUTING.md)

View File

@ -0,0 +1,40 @@
<!--
Have you read Atom's Code of Conduct? By filing an Issue, you are expected to comply with it, including treating everyone with respect: https://github.com/atom/atom/blob/master/CODE_OF_CONDUCT.md
Do you want to ask a question? Are you looking for support? The Atom message board is the best place for getting support: https://discuss.atom.io
-->
### Prerequisites
* [ ] Put an X between the brackets on this line if you have done all of the following:
* Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode
* Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/
* Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq
* Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom
* Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages
### Description
[Description of the issue]
### Steps to Reproduce
1. [First Step]
2. [Second Step]
3. [and so on...]
**Expected behavior:** [What you expect to happen]
**Actual behavior:** [What actually happens]
**Reproduces how often:** [What percentage of the time does it reproduce?]
### Versions
You can get this information by copying and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running.
### Additional Information
Any additional information, configuration or data that might be necessary to reproduce the issue.

View File

@ -0,0 +1,31 @@
Copyright (c) 2014 GitHub Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------
This package was derived from a TextMate bundle located at
https://github.com/textmate/css.tmbundle and distributed under the following
license, located in `README.mdown`:
Permission to copy, use, modify, sell and distribute this
software is granted. This software is provided "as is" without
express or implied warranty, and with no claim as to its
suitability for any purpose.

View File

@ -0,0 +1,28 @@
### Requirements
* Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion.
* All new code requires tests to ensure against regressions
### Description of the Change
<!--
We must be able to understand the design of your change from this description. If we can't get a good idea of what the code will be doing from the description here, the pull request may be closed at the maintainers' discretion. Keep in mind that the maintainer reviewing this PR may not be familiar with or have worked with the code here recently, so please walk us through the concepts.
-->
### Alternate Designs
<!-- Explain what other alternates were considered and why the proposed version was selected -->
### Benefits
<!-- What benefits will be realized by the code change? -->
### Possible Drawbacks
<!-- What are the possible side-effects or negative impacts of the code change? -->
### Applicable Issues
<!-- Enter any applicable Issues here -->

View File

@ -0,0 +1,10 @@
# CSS language support in Atom
![CI Status](https://github.com/atom/language-css/actions/workflows/main.yml/badge.svg)
Adds syntax highlighting, completions, and snippets to CSS files in Atom.
Originally [converted](http://flight-manual.atom.io/hacking-atom/sections/converting-from-textmate)
from the [CSS TextMate bundle](https://github.com/textmate/css.tmbundle).
Contributions are greatly appreciated. Please fork this repository and open a
pull request to add snippets, make grammar tweaks, etc.

View File

@ -0,0 +1,37 @@
{
"max_line_length": {
"level": "ignore"
},
"no_empty_param_list": {
"level": "error"
},
"arrow_spacing": {
"level": "error"
},
"no_interpolation_in_single_quotes": {
"level": "error"
},
"no_debugger": {
"level": "error"
},
"prefer_english_operator": {
"level": "error"
},
"colon_assignment_spacing": {
"spacing": {
"left": 0,
"right": 1
},
"level": "error"
},
"braces_spacing": {
"spaces": 0,
"level": "error"
},
"spacing_after_comma": {
"level": "error"
},
"no_stand_alone_at": {
"level": "error"
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,107 @@
name: 'CSS'
scopeName: 'source.css'
type: 'tree-sitter'
parser: 'tree-sitter-css'
fileTypes: [
'css'
]
injectionRegExp: '(css|CSS)'
folds: [
{
start: {index: 0, type: '{'},
end: {index: -1, type: '}'}
}
{
type: 'comment'
}
]
comments:
start: '/*'
end: '*/'
scopes:
'stylesheet': 'source.css'
'comment': 'comment'
'tag_name': 'entity.name.tag'
'nesting_selector, universal_selector': 'entity.name.tag'
'sibling_selector > "~"': 'keyword.operator.combinator'
'child_selector > ">"': 'keyword.operator.combinator'
'adjacent_sibling_selector > "+"': 'keyword.operator.combinator'
'attribute_selector > "="': 'keyword.operator.pattern'
'attribute_selector > "^="': 'keyword.operator.pattern'
'attribute_selector > "|="': 'keyword.operator.pattern'
'attribute_selector > "~="': 'keyword.operator.pattern'
'attribute_selector > "$="': 'keyword.operator.pattern'
'attribute_selector > "*="': 'keyword.operator.pattern'
'attribute_selector > plain_value': 'string.unquoted.attribute-value'
'pseudo_element_selector > tag_name': 'entity.other.attribute-name.pseudo-element'
'pseudo_class_selector > class_name': 'entity.other.attribute-name.pseudo-class'
'class_name': 'entity.other.attribute-name.class'
'id_name': 'entity.other.attribute-name.id'
'namespace_name': 'entity.namespace.name'
'function_name': 'support.function'
'property_name, plain_value': [
{match: '^--', scopes: 'variable.css'}
]
'property_name': 'support.property-name'
'attribute_name': 'entity.other.attribute-name'
'
"@media",
"@import",
"@charset",
"@namespace",
"@supports",
"@keyframes",
at_keyword
': 'keyword.control.at-rule'
'to, from': 'keyword.control'
'important': 'keyword.other.important.css'
'string_value': 'string'
'color_value': 'constant.other.color'
'integer_value': 'numeric.constant'
'integer_value > unit': 'keyword.other.unit'
'float_value': 'numeric.constant'
'float_value > unit': 'keyword.other.unit'
'plain_value': [
{match:'^(aqua|black|blue|fuchsia|gray|green|lime|maroon|navy|olive|orange|purple|red|silver|teal|white|yellow)$', scopes:'support.constant.color.w3c-standard'}
{match:'^(aliceblue|antiquewhite|aquamarine|azure|beige|bisque|blanchedalmond|blueviolet|brown|burlywood
|cadetblue|chartreuse|chocolate|coral|cornflowerblue|cornsilk|crimson|cyan|darkblue|darkcyan
|darkgoldenrod|darkgray|darkgreen|darkgrey|darkkhaki|darkmagenta|darkolivegreen|darkorange
|darkorchid|darkred|darksalmon|darkseagreen|darkslateblue|darkslategray|darkslategrey|darkturquoise
|darkviolet|deeppink|deepskyblue|dimgray|dimgrey|dodgerblue|firebrick|floralwhite|forestgreen
|gainsboro|ghostwhite|gold|goldenrod|greenyellow|grey|honeydew|hotpink|indianred|indigo|ivory|khaki
|lavender|lavenderblush|lawngreen|lemonchiffon|lightblue|lightcoral|lightcyan|lightgoldenrodyellow
|lightgray|lightgreen|lightgrey|lightpink|lightsalmon|lightseagreen|lightskyblue|lightslategray
|lightslategrey|lightsteelblue|lightyellow|limegreen|linen|magenta|mediumaquamarine|mediumblue
|mediumorchid|mediumpurple|mediumseagreen|mediumslateblue|mediumspringgreen|mediumturquoise
|mediumvioletred|midnightblue|mintcream|mistyrose|moccasin|navajowhite|oldlace|olivedrab|orangered
|orchid|palegoldenrod|palegreen|paleturquoise|palevioletred|papayawhip|peachpuff|peru|pink|plum
|powderblue|rebeccapurple|rosybrown|royalblue|saddlebrown|salmon|sandybrown|seagreen|seashell
|sienna|skyblue|slateblue|slategray|slategrey|snow|springgreen|steelblue|tan|thistle|tomato
|transparent|turquoise|violet|wheat|whitesmoke|yellowgreen)$', scopes: 'support.constant.color.w3c-extended'}
'support.constant.property-value.css'
]
'feature_name': 'support.type.property-name'
'color_value > "#"': 'punctuation.definition.constant.css'
'id_selector > "#"': 'punctuation.definition.entity.css'
'selectors > ","': 'punctuation.separator.list.comma.css'
'"and", "or", "not", "only"': 'keyword.operator'
'keyword_query': 'keyword.operator'
'binary_expression > "+"': 'keyword.operator'
'binary_expression > "-"': 'keyword.operator'
'binary_expression > "/"': 'keyword.operator'
'binary_expression > "*"': 'keyword.operator'

169
packages/language-css/package-lock.json generated Normal file
View File

@ -0,0 +1,169 @@
{
"name": "language-css",
"version": "0.45.4",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"balanced-match": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=",
"dev": true
},
"brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dev": true,
"requires": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
"coffee-script": {
"version": "1.11.1",
"resolved": "https://registry.npmjs.org/coffee-script/-/coffee-script-1.11.1.tgz",
"integrity": "sha1-vxxHrWREOg2V0S3ysUfMCk2q1uk=",
"dev": true
},
"coffeelint": {
"version": "1.16.2",
"resolved": "https://registry.npmjs.org/coffeelint/-/coffeelint-1.16.2.tgz",
"integrity": "sha512-6mzgOo4zb17WfdrSui/cSUEgQ0AQkW3gXDht+6lHkfkqGUtSYKwGdGcXsDfAyuScVzTlTtKdfwkAlJWfqul7zg==",
"dev": true,
"requires": {
"coffee-script": "~1.11.0",
"glob": "^7.0.6",
"ignore": "^3.0.9",
"optimist": "^0.6.1",
"resolve": "^0.6.3",
"strip-json-comments": "^1.0.2"
}
},
"concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
"dev": true
},
"fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=",
"dev": true
},
"glob": {
"version": "7.1.6",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
"integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==",
"dev": true,
"requires": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^3.0.4",
"once": "^1.3.0",
"path-is-absolute": "^1.0.0"
}
},
"ignore": {
"version": "3.3.10",
"resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz",
"integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==",
"dev": true
},
"inflight": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
"dev": true,
"requires": {
"once": "^1.3.0",
"wrappy": "1"
}
},
"inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
"dev": true
},
"minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"dev": true,
"requires": {
"brace-expansion": "^1.1.7"
}
},
"minimist": {
"version": "0.0.10",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz",
"integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=",
"dev": true
},
"nan": {
"version": "2.14.2",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.14.2.tgz",
"integrity": "sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ=="
},
"once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
"dev": true,
"requires": {
"wrappy": "1"
}
},
"optimist": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz",
"integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=",
"dev": true,
"requires": {
"minimist": "~0.0.1",
"wordwrap": "~0.0.2"
}
},
"path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=",
"dev": true
},
"resolve": {
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-0.6.3.tgz",
"integrity": "sha1-3ZV5gufnNt699TtYpN2RdUV13UY=",
"dev": true
},
"strip-json-comments": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-1.0.4.tgz",
"integrity": "sha1-HhX7ysl9Pumb8tc7TGVrCCu6+5E=",
"dev": true
},
"tree-sitter-css": {
"version": "0.19.0",
"resolved": "https://registry.npmjs.org/tree-sitter-css/-/tree-sitter-css-0.19.0.tgz",
"integrity": "sha512-LYCHS1V2bzeNJr8Mgh60H06qB8NNJyRJVgW1gKCEjcm5S48d8H9xOnrzIlsyLHaXFfnGWCrHJ6jxN6G3s5fJTA==",
"requires": {
"nan": "^2.14.1"
}
},
"wordwrap": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz",
"integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=",
"dev": true
},
"wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=",
"dev": true
}
}
}

View File

@ -0,0 +1,27 @@
{
"name": "language-css",
"description": "CSS support in Atom",
"keywords": [
"tree-sitter"
],
"version": "0.45.4",
"engines": {
"atom": "*",
"node": "*"
},
"homepage": "http://atom.github.io/language-css",
"repository": {
"type": "git",
"url": "https://github.com/atom/language-css.git"
},
"license": "MIT",
"bugs": {
"url": "https://github.com/atom/language-css/issues"
},
"devDependencies": {
"coffeelint": "^1.10.1"
},
"dependencies": {
"tree-sitter-css": "^0.19.0"
}
}

View File

@ -0,0 +1,43 @@
'.source.css':
'core':
'useTreeSitterParsers': false
'editor':
'commentStart': '/*'
'commentEnd': '*/'
'foldEndPattern': '(?<!\\*)\\*\\*/|^\\s*\\}|\\/*\\s*@end\\s*\\*\\/'
'autocomplete':
'extraWordCharacters': '-'
'symbols':
'selector':
'selector': '.css.selector'
'typePriority': 1
'.source.css .meta.property-value.css':
'autocomplete':
'symbols':
'builtins':
'suggestions': [{
'type': 'function'
'snippet': "rgb(${1:255}, ${2:0}, ${3:0})${4:;}$0"
'rightLabel': 'CSS builtin'
'description': "Creates a Color from red (0-255), green (0-255), and blue (0-255)."
'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#rgb()'
}, {
'type': 'function'
'snippet': "rgba(${1:255}, ${2:0}, ${3:0}, ${4:0.5})${5:;}$0"
'rightLabel': 'CSS builtin'
'description': "Creates a Color from red (0-255), green (0-255), blue (0-255), and alpha (0-1)."
'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#rgba()'
}, {
'type': 'function'
'snippet': "hsl(${1:360}, ${2:100%}, ${3:100%})${4:;}$0"
'rightLabel': 'CSS builtin'
'description': "Creates a Color from hue (0-360), saturation (0-100%), and lightness (0-100%)."
'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#hsl()'
},{
'type': 'function'
'snippet': "hsla(${1:360}, ${2:100%}, ${3:100%}, ${4:0.5})${5:;}$0"
'rightLabel': 'CSS builtin'
'description': "Creates a Color from hue (0-360), saturation (0-100%), lightness (0-100%), and alpha (0-1)."
'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#hsla()'
}]

View File

@ -0,0 +1,51 @@
'.source.css':
'!important':
'prefix': '!'
'body': '!important${1:;}$0'
'@charset':
'prefix': 'charset'
'body': '@charset "${1:UTF-8}";$0'
'description': 'Specifies the character encoding used in the style sheet.'
'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/@charset'
'@font-face':
'prefix': 'fontface'
'body': '@font-face {\n\t$1\n}$0'
'description': 'Specify online fonts to display text on their web pages.'
'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face'
'@import':
'prefix': 'import'
'body': '@import "$0";'
'description': 'Import style rules from other style sheets.'
'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/@import'
'@keyframes':
'prefix': 'keyframes'
'body': '@keyframes ${1:identifier} {\n\t$2\n}$0'
'description': 'Specifies a CSS animation.'
'descriptionMoreURL': 'https://developer.mozilla.org/en/docs/Web/CSS/@keyframes'
'@media':
'prefix': 'media'
'body': '@media ${1:query} {\n\t$2\n}$0'
'description': 'A set of nested statements with a condition defined by a media query.'
'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/@media'
'@supports':
'prefix': 'supports'
'body': '@supports ${1:condition} {\n\t$2\n}$0'
'description': 'A set of nested statements with a condition defined by a supports condition.'
'descriptionMoreURL': 'https://developer.mozilla.org/en-US/docs/Web/CSS/@supports'
# We don't want the snippets to show in properties
'.source.css .meta.property-list':
'!important':
'prefix': '!'
'@charset':
'prefix': 'charset'
'@font-face':
'prefix': 'fontface'
'@import':
'prefix': 'import'
'@keyframes':
'prefix': 'keyframes'
'@media':
'prefix': 'media'
'@supports':
'prefix': 'supports'

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,15 @@
# Configuration for probot-no-response - https://github.com/probot/no-response
# Number of days of inactivity before an issue is closed for lack of response
daysUntilClose: 28
# Label requiring a response
responseRequiredLabel: more-information-needed
# Comment to post when closing an issue for lack of response. Set to `false` to disable.
closeComment: >
This issue has been automatically closed because there has been no response
to our request for more information from the original author. With only the
information that is currently in the issue, we don't have enough information
to take action. Please reach out if you have or find the answers we need so
that we can investigate further.

View File

@ -0,0 +1,23 @@
name: CI
on: [push]
env:
CI: true
jobs:
Test:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
channel: [stable, beta]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v1
- uses: UziTech/action-setup-atom@v2
with:
version: ${{ matrix.channel }}
- name: Install dependencies
run: apm install
- name: Run tests
run: atom --test spec

1
packages/language-gfm/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
node_modules

View File

@ -0,0 +1 @@
See the [Atom contributing guide](https://github.com/atom/atom/blob/master/CONTRIBUTING.md)

View File

@ -0,0 +1,40 @@
<!--
Have you read Atom's Code of Conduct? By filing an Issue, you are expected to comply with it, including treating everyone with respect: https://github.com/atom/atom/blob/master/CODE_OF_CONDUCT.md
Do you want to ask a question? Are you looking for support? The Atom message board is the best place for getting support: https://discuss.atom.io
-->
### Prerequisites
* [ ] Put an X between the brackets on this line if you have done all of the following:
* Reproduced the problem in Safe Mode: http://flight-manual.atom.io/hacking-atom/sections/debugging/#using-safe-mode
* Followed all applicable steps in the debugging guide: http://flight-manual.atom.io/hacking-atom/sections/debugging/
* Checked the FAQs on the message board for common solutions: https://discuss.atom.io/c/faq
* Checked that your issue isn't already filed: https://github.com/issues?utf8=✓&q=is%3Aissue+user%3Aatom
* Checked that there is not already an Atom package that provides the described functionality: https://atom.io/packages
### Description
[Description of the issue]
### Steps to Reproduce
1. [First Step]
2. [Second Step]
3. [and so on...]
**Expected behavior:** [What you expect to happen]
**Actual behavior:** [What actually happens]
**Reproduces how often:** [What percentage of the time does it reproduce?]
### Versions
You can get this information by copying and pasting the output of `atom --version` and `apm --version` from the command line. Also, please include the OS and what version of the OS you're running.
### Additional Information
Any additional information, configuration or data that might be necessary to reproduce the issue.

View File

@ -0,0 +1,20 @@
Copyright (c) 2014 GitHub Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,28 @@
### Requirements
* Filling out the template is required. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion.
* All new code requires tests to ensure against regressions
### Description of the Change
<!--
We must be able to understand the design of your change from this description. If we can't get a good idea of what the code will be doing from the description here, the pull request may be closed at the maintainers' discretion. Keep in mind that the maintainer reviewing this PR may not be familiar with or have worked with the code here recently, so please walk us through the concepts.
-->
### Alternate Designs
<!-- Explain what other alternates were considered and why the proposed version was selected -->
### Benefits
<!-- What benefits will be realized by the code change? -->
### Possible Drawbacks
<!-- What are the possible side-effects or negative impacts of the code change? -->
### Applicable Issues
<!-- Enter any applicable Issues here -->

View File

@ -0,0 +1,6 @@
# GitHub flavored Markdown package
[![OS X Build Status](https://travis-ci.org/atom/language-gfm.svg?branch=master)](https://travis-ci.org/atom/language-gfm) [![Windows Build Status](https://ci.appveyor.com/api/projects/status/rpub8qjyd8lt7wai/branch/master?svg=true)](https://ci.appveyor.com/project/Atom/language-gfm/branch/master) [![Dependency Status](https://david-dm.org/atom/language-gfm.svg)](https://david-dm.org/atom/language-gfm)
Adds syntax highlighting and snippets to [GitHub flavored Markdown](https://help.github.com/articles/github-flavored-markdown) files in Atom.
Contributions are greatly appreciated. Please fork this repository and open a pull request to add snippets, make grammar tweaks, etc.

View File

@ -0,0 +1,37 @@
{
"max_line_length": {
"level": "ignore"
},
"no_empty_param_list": {
"level": "error"
},
"arrow_spacing": {
"level": "error"
},
"no_interpolation_in_single_quotes": {
"level": "error"
},
"no_debugger": {
"level": "error"
},
"prefer_english_operator": {
"level": "error"
},
"colon_assignment_spacing": {
"spacing": {
"left": 0,
"right": 1
},
"level": "error"
},
"braces_spacing": {
"spaces": 0,
"level": "error"
},
"spacing_after_comma": {
"level": "error"
},
"no_stand_alone_at": {
"level": "error"
}
}

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,14 @@
{
"name": "language-gfm",
"version": "0.90.8",
"description": "Syntax highlighting and snippets for GitHub Flavored Markdown (GFM).",
"repository": "https://github.com/atom/language-gfm",
"license": "MIT",
"engines": {
"atom": "*"
},
"devDependencies": {
"coffee-script": "1.7.0",
"coffeelint": "^1.10.1"
}
}

View File

@ -0,0 +1,5 @@
'.source.gfm:not(.markup.code)':
'editor':
'softWrap': true
'commentStart': '<!-- '
'commentEnd': ' -->'

View File

@ -0,0 +1,42 @@
'.source.gfm':
'bold text':
'prefix': 'b'
'body': '**$1**$0'
'code':
'prefix': 'code'
'body': """
```$1
$2
```$0
"""
'italic text':
'prefix': 'i'
'body': '*$1*$0'
'embedded image':
'prefix': 'img'
'body': '![$1]($2)$0'
'link':
'prefix': 'l'
'body': '[$1]($2)$0'
'bullet point':
'prefix': 'p'
'body': '- $1'
'reference':
'prefix': 'ref'
'body': '[${1:id}]: ${2:url}${3: "${4:title}"}$0'
'reference image':
'prefix': 'rimg'
'body': '![$1][$2]$0'
'reference link':
'prefix': 'rl'
'body': '[$1][$2]$0'
'todo':
'prefix': 't'
'body': '- [ ] $1'
'table':
'prefix': 'table'
'body': """
| ${1:Header One } | ${2:Header Two } |
| :------------- | :------------- |
| ${3:Item One } | ${4:Item Two } |$0
"""

View File

@ -0,0 +1,897 @@
describe "GitHub Flavored Markdown grammar", ->
grammar = null
beforeEach ->
waitsForPromise ->
atom.packages.activatePackage("language-gfm")
runs ->
grammar = atom.grammars.grammarForScopeName("source.gfm")
it "parses the grammar", ->
expect(grammar).toBeDefined()
expect(grammar.scopeName).toBe "source.gfm"
it "tokenizes spaces", ->
{tokens} = grammar.tokenizeLine(" ")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
it "tokenizes horizontal rules", ->
{tokens} = grammar.tokenizeLine("***")
expect(tokens[0]).toEqual value: "***", scopes: ["source.gfm", "comment.hr.gfm"]
{tokens} = grammar.tokenizeLine("---")
expect(tokens[0]).toEqual value: "---", scopes: ["source.gfm", "comment.hr.gfm"]
{tokens} = grammar.tokenizeLine("___")
expect(tokens[0]).toEqual value: "___", scopes: ["source.gfm", "comment.hr.gfm"]
it "tokenizes escaped characters", ->
{tokens} = grammar.tokenizeLine("\\*")
expect(tokens[0]).toEqual value: "\\*", scopes: ["source.gfm", "constant.character.escape.gfm"]
{tokens} = grammar.tokenizeLine("\\\\")
expect(tokens[0]).toEqual value: "\\\\", scopes: ["source.gfm", "constant.character.escape.gfm"]
{tokens} = grammar.tokenizeLine("\\abc")
expect(tokens[0]).toEqual value: "\\a", scopes: ["source.gfm", "constant.character.escape.gfm"]
expect(tokens[1]).toEqual value: "bc", scopes: ["source.gfm"]
it "tokenizes ***bold italic*** text", ->
{tokens} = grammar.tokenizeLine("this is ***bold italic*** text")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[2]).toEqual value: "bold italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[3]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ***bold\nitalic***!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(firstLineTokens[2]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[0]).toEqual value: "italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
it "tokenizes ___bold italic___ text", ->
{tokens} = grammar.tokenizeLine("this is ___bold italic___ text")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[2]).toEqual value: "bold italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[3]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ___bold\nitalic___!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(firstLineTokens[2]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[0]).toEqual value: "italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
it "tokenizes **bold** text", ->
{tokens} = grammar.tokenizeLine("**bold**")
expect(tokens[0]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[1]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[2]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is **not\nbold**!")
expect(firstLineTokens[0]).toEqual value: "this is **not", scopes: ["source.gfm"]
expect(secondLineTokens[0]).toEqual value: "bold**!", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not**bold**")
expect(tokens[0]).toEqual value: "not", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[3]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"]
it "tokenizes __bold__ text", ->
{tokens} = grammar.tokenizeLine("____")
expect(tokens[0]).toEqual value: "____", scopes: ["source.gfm", "comment.hr.gfm"]
{tokens} = grammar.tokenizeLine("__bold__")
expect(tokens[0]).toEqual value: "__", scopes: [ 'source.gfm', 'markup.bold.gfm', 'punctuation.definition.entity.gfm' ]
expect(tokens[1]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[2]).toEqual value: "__", scopes: [ 'source.gfm', 'markup.bold.gfm', 'punctuation.definition.entity.gfm' ]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is __not\nbold__!")
expect(firstLineTokens[0]).toEqual value: "this is __not", scopes: ["source.gfm"]
expect(secondLineTokens[0]).toEqual value: "bold__!", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not__bold__")
expect(tokens[0]).toEqual value: "not__bold__", scopes: ["source.gfm"]
it "tokenizes *italic* text", ->
{tokens} = grammar.tokenizeLine("**")
expect(tokens[0]).toEqual value: "**", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("this is *italic* text")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "*", scopes: [ "source.gfm", "markup.italic.gfm", "punctuation.definition.entity.gfm" ]
expect(tokens[2]).toEqual value: "italic", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[3]).toEqual value: "*", scopes: [ "source.gfm", "markup.italic.gfm", "punctuation.definition.entity.gfm" ]
expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("is*italic*")
expect(tokens[0]).toEqual value: "is", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "*", scopes: [ "source.gfm", "markup.italic.gfm", "punctuation.definition.entity.gfm" ]
expect(tokens[2]).toEqual value: "italic", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[3]).toEqual value: "*", scopes: [ "source.gfm", "markup.italic.gfm", "punctuation.definition.entity.gfm" ]
{tokens} = grammar.tokenizeLine("* not italic")
expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[2]).toEqual value: "not italic", scopes: ["source.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is *not\nitalic*!")
expect(firstLineTokens[0]).toEqual value: "this is *not", scopes: ["source.gfm"]
expect(secondLineTokens[0]).toEqual value: "italic*!", scopes: ["source.gfm"]
it "tokenizes _italic_ text", ->
{tokens} = grammar.tokenizeLine("__")
expect(tokens[0]).toEqual value: "__", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("this is _italic_ text")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "_", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ]
expect(tokens[2]).toEqual value: "italic", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[3]).toEqual value: "_", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ]
expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not_italic_")
expect(tokens[0]).toEqual value: "not_italic_", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not x^{a}_m y^{b}_n italic")
expect(tokens[0]).toEqual value: "not x^{a}_m y^{b}_n italic", scopes: ["source.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is _not\nitalic_!")
expect(firstLineTokens[0]).toEqual value: "this is _not", scopes: ["source.gfm"]
expect(secondLineTokens[0]).toEqual value: "italic_!", scopes: ["source.gfm"]
it "tokenizes ~~strike~~ text", ->
{tokens} = grammar.tokenizeLine("~~strike~~")
expect(tokens[0]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
expect(tokens[1]).toEqual value: "strike", scopes: ["source.gfm", "markup.strike.gfm"]
expect(tokens[2]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ~~str\nike~~!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
expect(firstLineTokens[2]).toEqual value: "str", scopes: ["source.gfm", "markup.strike.gfm"]
expect(secondLineTokens[0]).toEqual value: "ike", scopes: ["source.gfm", "markup.strike.gfm"]
expect(secondLineTokens[1]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not~~strike~~")
expect(tokens[0]).toEqual value: "not~~strike~~", scopes: ["source.gfm"]
it "tokenizes headings", ->
{tokens} = grammar.tokenizeLine("# Heading 1")
expect(tokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 1", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]
{tokens} = grammar.tokenizeLine("## Heading 2")
expect(tokens[0]).toEqual value: "##", scopes: ["source.gfm", "markup.heading.heading-2.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-2.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 2", scopes: ["source.gfm", "markup.heading.heading-2.gfm"]
{tokens} = grammar.tokenizeLine("### Heading 3")
expect(tokens[0]).toEqual value: "###", scopes: ["source.gfm", "markup.heading.heading-3.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-3.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 3", scopes: ["source.gfm", "markup.heading.heading-3.gfm"]
{tokens} = grammar.tokenizeLine("#### Heading 4")
expect(tokens[0]).toEqual value: "####", scopes: ["source.gfm", "markup.heading.heading-4.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-4.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 4", scopes: ["source.gfm", "markup.heading.heading-4.gfm"]
{tokens} = grammar.tokenizeLine("##### Heading 5")
expect(tokens[0]).toEqual value: "#####", scopes: ["source.gfm", "markup.heading.heading-5.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-5.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 5", scopes: ["source.gfm", "markup.heading.heading-5.gfm"]
{tokens} = grammar.tokenizeLine("###### Heading 6")
expect(tokens[0]).toEqual value: "######", scopes: ["source.gfm", "markup.heading.heading-6.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-6.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 6", scopes: ["source.gfm", "markup.heading.heading-6.gfm"]
it "tokenizes matches inside of headers", ->
{tokens} = grammar.tokenizeLine("# Heading :one:")
expect(tokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading ", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]
expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.start.gfm"]
expect(tokens[4]).toEqual value: "one", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.word.gfm"]
expect(tokens[5]).toEqual value: ":", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.end.gfm"]
it "tokenizes an :emoji:", ->
{tokens} = grammar.tokenizeLine("this is :no_good:")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: ":", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.start.gfm"]
expect(tokens[2]).toEqual value: "no_good", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.word.gfm"]
expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.end.gfm"]
{tokens} = grammar.tokenizeLine("this is :no good:")
expect(tokens[0]).toEqual value: "this is :no good:", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("http://localhost:8080")
expect(tokens[0]).toEqual value: "http://localhost:8080", scopes: ["source.gfm"]
it "tokenizes a ``` code block", ->
{tokens, ruleStack} = grammar.tokenizeLine("```")
expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("```", ruleStack)
expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "tokenizes a ~~~ code block", ->
{tokens, ruleStack} = grammar.tokenizeLine("~~~")
expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("~~~", ruleStack)
expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "doesn't tokenise ~`~ as a code block", ->
{tokens} = grammar.tokenizeLine("~`~")
expect(tokens[0]).toEqual value: '~', scopes: ['source.gfm']
expect(tokens[1]).toEqual value: '`', scopes: ['source.gfm', 'markup.raw.gfm']
expect(tokens[2]).toEqual value: '~', scopes: ['source.gfm', 'markup.raw.gfm']
it "tokenises code-blocks with borders of differing lengths", ->
[firstLineTokens, secondLineTokens, thirdLineTokens] = grammar.tokenizeLines("~~~\nfoo bar\n~~~~~~~")
expect(firstLineTokens[0]).toEqual value: '~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm']
expect(secondLineTokens[0]).toEqual value: 'foo bar', scopes: ['source.gfm', 'markup.raw.gfm']
expect(thirdLineTokens[0]).toEqual value: '~~~~~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm']
[firstLineTokens, secondLineTokens, thirdLineTokens] = grammar.tokenizeLines("~~~~~~~\nfoo bar\n~~~")
expect(firstLineTokens[0]).toEqual value: '~~~~~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm']
expect(secondLineTokens[0]).toEqual value: 'foo bar', scopes: ['source.gfm', 'markup.raw.gfm']
expect(thirdLineTokens[0]).toEqual value: '~~~', scopes: ['source.gfm', 'markup.raw.gfm']
it "tokenizes a ``` code block with trailing whitespace", ->
  # Trailing spaces after a closing ``` fence are swallowed into the fence
  # token itself; ruleStack is threaded between lines as usual.
  {tokens, ruleStack} = grammar.tokenizeLine("```")
  expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
  {tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
  expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
  {tokens} = grammar.tokenizeLine("``` ", ruleStack)
  expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "tokenizes a ~~~ code block with trailing whitespace", ->
  # Same as the ``` case: trailing spaces after the closing ~~~ fence are
  # part of the fence token.
  {tokens, ruleStack} = grammar.tokenizeLine("~~~")
  expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
  {tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
  expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
  {tokens} = grammar.tokenizeLine("~~~ ", ruleStack)
  expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "tokenises a ``` code block with an unknown language", ->
  # An unrecognised fence language still opens a markup.code.other.gfm block;
  # the embedded scope name is the lower-cased language ("mylanguage").
  {tokens, ruleStack} = grammar.tokenizeLine("``` myLanguage")
  expect(tokens[0]).toEqual value: '``` myLanguage', scopes: ['source.gfm', 'markup.code.other.gfm', 'support.gfm']
  {tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
  expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ['source.gfm', 'markup.code.other.gfm', 'source.embedded.mylanguage']
  {tokens} = grammar.tokenizeLine("```", ruleStack)
  expect(tokens[0]).toEqual value: '```', scopes: ['source.gfm', 'markup.code.other.gfm', 'support.gfm']
it "tokenizes a ``` code block with a known language", ->
  # Known languages map to an embedded grammar scope (e.g. bash ->
  # source.embedded.shell, properties -> source.embedded.git-config);
  # matching is case-insensitive (```JS behaves like ```js).
  {tokens, ruleStack} = grammar.tokenizeLine("``` bash")
  expect(tokens[0]).toEqual value: "``` bash", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
  {tokens, ruleStack} = grammar.tokenizeLine("```js ")
  expect(tokens[0]).toEqual value: "```js ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
  {tokens, ruleStack} = grammar.tokenizeLine("```JS ")
  expect(tokens[0]).toEqual value: "```JS ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
  {tokens, ruleStack} = grammar.tokenizeLine("```r ")
  expect(tokens[0]).toEqual value: "```r ", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
  {tokens, ruleStack} = grammar.tokenizeLine("```properties ")
  expect(tokens[0]).toEqual value: "```properties ", scopes: ["source.gfm", "markup.code.git-config.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.git-config"
it "tokenizes a Rmarkdown ``` code block", ->
  # R Markdown chunk headers (```{r ...}) open an embedded R block; chunk
  # options after the language are kept inside the fence token.
  {tokens, ruleStack} = grammar.tokenizeLine("```{r}")
  expect(tokens[0]).toEqual value: "```{r}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
  {tokens, ruleStack} = grammar.tokenizeLine("```{r,eval=TRUE,cache=FALSE}")
  expect(tokens[0]).toEqual value: "```{r,eval=TRUE,cache=FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
  {tokens, ruleStack} = grammar.tokenizeLine("```{r eval=TRUE,cache=FALSE}")
  expect(tokens[0]).toEqual value: "```{r eval=TRUE,cache=FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
it "tokenizes a Rmarkdown ``` code block with whitespace", ->
  # Whitespace inside the chunk braces and around options is tolerated;
  # the language letter is case-insensitive ({R} behaves like {r}).
  {tokens, ruleStack} = grammar.tokenizeLine("```{r }")
  expect(tokens[0]).toEqual value: "```{r }", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
  {tokens, ruleStack} = grammar.tokenizeLine("```{R } ")
  expect(tokens[0]).toEqual value: "```{R } ", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
  {tokens, ruleStack} = grammar.tokenizeLine("```{r eval = TRUE, cache = FALSE}")
  expect(tokens[0]).toEqual value: "```{r eval = TRUE, cache = FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
it "tokenizes a ~~~ code block with a language", ->
  # ~~~ fences support the same language mapping as ``` fences.
  {tokens, ruleStack} = grammar.tokenizeLine("~~~ bash")
  expect(tokens[0]).toEqual value: "~~~ bash", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
  {tokens, ruleStack} = grammar.tokenizeLine("~~~js ")
  expect(tokens[0]).toEqual value: "~~~js ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
  {tokens, ruleStack} = grammar.tokenizeLine("~~~properties ")
  expect(tokens[0]).toEqual value: "~~~properties ", scopes: ["source.gfm", "markup.code.git-config.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.git-config"
it "tokenizes a ``` code block with a language and trailing whitespace", ->
  # The closing fence (with trailing spaces) is scoped with the language
  # block it closes, and the embedded content scope is still on the stack.
  {tokens, ruleStack} = grammar.tokenizeLine("``` bash")
  {tokens} = grammar.tokenizeLine("``` ", ruleStack)
  expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
  {tokens, ruleStack} = grammar.tokenizeLine("```js ")
  {tokens} = grammar.tokenizeLine("``` ", ruleStack)
  expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
it "tokenizes a ~~~ code block with a language and trailing whitespace", ->
  # Same closing-fence behaviour for ~~~ language blocks, including the
  # git-config mapping for "properties".
  {tokens, ruleStack} = grammar.tokenizeLine("~~~ bash")
  {tokens} = grammar.tokenizeLine("~~~ ", ruleStack)
  expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
  {tokens, ruleStack} = grammar.tokenizeLine("~~~js ")
  {tokens} = grammar.tokenizeLine("~~~ ", ruleStack)
  expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
  {tokens, ruleStack} = grammar.tokenizeLine("~~~ properties ")
  {tokens} = grammar.tokenizeLine("~~~ ", ruleStack)
  expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.code.git-config.gfm", "support.gfm"]
  expect(ruleStack[1].contentScopeName).toBe "source.embedded.git-config"
it "tokenizes inline `code` blocks", ->
  # Inline spans: single-backtick spans, an empty `` pair, and a
  # double-backtick span containing a backslash-escaped backtick.
  {tokens} = grammar.tokenizeLine("`this` is `code`")
  expect(tokens[0]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
  expect(tokens[1]).toEqual value: "this", scopes: ["source.gfm", "markup.raw.gfm"]
  expect(tokens[2]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
  expect(tokens[3]).toEqual value: " is ", scopes: ["source.gfm"]
  expect(tokens[4]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
  expect(tokens[5]).toEqual value: "code", scopes: ["source.gfm", "markup.raw.gfm"]
  expect(tokens[6]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
  {tokens} = grammar.tokenizeLine("``")
  expect(tokens[0]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
  expect(tokens[1]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
  {tokens} = grammar.tokenizeLine("``a\\`b``")
  expect(tokens[0]).toEqual value: "``", scopes: ["source.gfm", "markup.raw.gfm"]
  expect(tokens[1]).toEqual value: "a\\`b", scopes: ["source.gfm", "markup.raw.gfm"]
  expect(tokens[2]).toEqual value: "``", scopes: ["source.gfm", "markup.raw.gfm"]
it "tokenizes [links](links)", ->
  # Inline link [text](destination): brackets and parens carry punctuation
  # scopes, the text is entity.gfm and the destination is underline.link.
  {tokens} = grammar.tokenizeLine("please click [this link](website)")
  expectations = [
    {value: "please click ", scopes: ["source.gfm"]}
    {value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]}
    {value: "this link", scopes: ["source.gfm", "link", "entity.gfm"]}
    {value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]}
    {value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]}
    {value: "website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]}
    {value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]}
  ]
  expect(tokens[index]).toEqual(expected) for expected, index in expectations
it "tokenizes reference [links][links]", ->
  # Reference links use square brackets for the id; the id itself is scoped
  # like a link destination.
  {tokens} = grammar.tokenizeLine("please click [this link][website]")
  expect(tokens[0]).toEqual value: "please click ", scopes: ["source.gfm"]
  expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[2]).toEqual value: "this link", scopes: ["source.gfm", "link", "entity.gfm"]
  expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
  expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[5]).toEqual value: "website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
  expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes id-less reference [links][]", ->
  # Shortcut reference links with an empty id pair: [] yields only the
  # punctuation tokens with no destination token between them.
  {tokens} = grammar.tokenizeLine("please click [this link][]")
  expect(tokens[0]).toEqual value: "please click ", scopes: ["source.gfm"]
  expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[2]).toEqual value: "this link", scopes: ["source.gfm", "link", "entity.gfm"]
  expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
  expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[5]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [link]: footers", ->
  # Link definition footers: the colon is a key-value separator and the URL
  # is the underlined link destination.
  {tokens} = grammar.tokenizeLine("[aLink]: http://website")
  expect(tokens[0]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[1]).toEqual value: "aLink", scopes: ["source.gfm", "link", "entity.gfm"]
  expect(tokens[2]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
  expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "link", "punctuation.separator.key-value.gfm"]
  expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "link"]
  expect(tokens[5]).toEqual value: "http://website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
it "tokenizes [link]: <footers>", ->
  # Angle-bracketed footer URLs: the ": <" run and ">" stay plain link
  # scope; only the URL itself gets markup.underline.link.gfm.
  {tokens} = grammar.tokenizeLine("[aLink]: <http://website>")
  expect(tokens[0]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[1]).toEqual value: "aLink", scopes: ["source.gfm", "link", "entity.gfm"]
  expect(tokens[2]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
  expect(tokens[3]).toEqual value: ": <", scopes: ["source.gfm", "link"]
  expect(tokens[4]).toEqual value: "http://website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
  expect(tokens[5]).toEqual value: ">", scopes: ["source.gfm", "link"]
it "tokenizes [![links](links)](links)", ->
  # Image-inside-link, both inline: "[!" opens the outer link, the inner
  # image and the outer destination are each scoped as links.
  {tokens} = grammar.tokenizeLine("[![title](image)](link)")
  expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
  expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
  expect(tokens[4]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
  expect(tokens[6]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
  expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
  expect(tokens[8]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
  expect(tokens[10]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [![links](links)][links]", ->
  # Inline image inside a reference link: outer destination uses [id].
  {tokens} = grammar.tokenizeLine("[![title](image)][link]")
  expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
  expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
  expect(tokens[4]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
  expect(tokens[6]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
  expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
  expect(tokens[8]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
  expect(tokens[10]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [![links][links]](links)", ->
  # Reference image inside an inline link: inner destination uses [id],
  # outer destination uses (url).
  {tokens} = grammar.tokenizeLine("[![title][image]](link)")
  expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
  expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
  expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
  expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
  expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
  expect(tokens[8]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
  expect(tokens[10]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [![links][links]][links]", ->
  # Reference image inside a reference link: both destinations use [id].
  {tokens} = grammar.tokenizeLine("[![title][image]][link]")
  expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
  expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
  expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
  expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
  expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
  expect(tokens[8]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
  expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
  expect(tokens[10]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes mentions", ->
  # @mentions: the @ is variable.mention.gfm, the username string.username.gfm.
  # A mention only matches after whitespace/line start and a plain username
  # (letters, digits, _ and -); trailing punctuation ends it.

  # No whitespace before @ -> not a mention.
  {tokens} = grammar.tokenizeLine("sentence with no space before@name ")
  expect(tokens[0]).toEqual value: "sentence with no space before@name ", scopes: ["source.gfm"]

  # Mentions surrounded by quotes, possessives and brackets all match.
  {tokens} = grammar.tokenizeLine("@name '@name' @name's @name. @name, (@name) [@name]")
  expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
  expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
  expect(tokens[2]).toEqual value: " '", scopes: ["source.gfm"]
  expect(tokens[3]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
  expect(tokens[4]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
  expect(tokens[5]).toEqual value: "' ", scopes: ["source.gfm"]
  expect(tokens[6]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
  expect(tokens[7]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
  expect(tokens[8]).toEqual value: "'s ", scopes: ["source.gfm"]
  expect(tokens[9]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
  expect(tokens[10]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
  expect(tokens[11]).toEqual value: ". ", scopes: ["source.gfm"]
  expect(tokens[12]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
  expect(tokens[13]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
  expect(tokens[14]).toEqual value: ", (", scopes: ["source.gfm"]
  expect(tokens[15]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
  expect(tokens[16]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
  expect(tokens[17]).toEqual value: ") [", scopes: ["source.gfm"]
  expect(tokens[18]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
  expect(tokens[19]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
  expect(tokens[20]).toEqual value: "]", scopes: ["source.gfm"]
  {tokens} = grammar.tokenizeLine('"@name"')
  expect(tokens[0]).toEqual value: '"', scopes: ["source.gfm"]
  expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
  expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
  expect(tokens[3]).toEqual value: '"', scopes: ["source.gfm"]

  # A symbol directly after the username invalidates the mention.
  {tokens} = grammar.tokenizeLine("sentence with a space before @name/ and an invalid symbol after")
  expect(tokens[0]).toEqual value: "sentence with a space before @name/ and an invalid symbol after", scopes: ["source.gfm"]
  {tokens} = grammar.tokenizeLine("sentence with a space before @name that continues")
  expect(tokens[0]).toEqual value: "sentence with a space before ", scopes: ["source.gfm"]
  expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
  expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
  expect(tokens[3]).toEqual value: " that continues", scopes: ["source.gfm"]

  # Mentions still match right after a list bullet.
  {tokens} = grammar.tokenizeLine("* @name at the start of an unordered list")
  expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
  expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm"]
  expect(tokens[2]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
  expect(tokens[3]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
  expect(tokens[4]).toEqual value: " at the start of an unordered list", scopes: ["source.gfm"]

  # Usernames may contain digits, underscores and hyphens.
  {tokens} = grammar.tokenizeLine("a username @1337_hubot with numbers, letters and underscores")
  expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"]
  expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
  expect(tokens[2]).toEqual value: "1337_hubot", scopes: ["source.gfm", "string.username.gfm"]
  expect(tokens[3]).toEqual value: " with numbers, letters and underscores", scopes: ["source.gfm"]
  {tokens} = grammar.tokenizeLine("a username @1337-hubot with numbers, letters and hyphens")
  expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"]
  expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
  expect(tokens[2]).toEqual value: "1337-hubot", scopes: ["source.gfm", "string.username.gfm"]
  expect(tokens[3]).toEqual value: " with numbers, letters and hyphens", scopes: ["source.gfm"]
  {tokens} = grammar.tokenizeLine("@name at the start of a line")
  expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
  expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
  expect(tokens[2]).toEqual value: " at the start of a line", scopes: ["source.gfm"]

  # Email addresses are not mentions.
  {tokens} = grammar.tokenizeLine("any email like you@domain.com shouldn't mistakenly be matched as a mention")
  expect(tokens[0]).toEqual value: "any email like you@domain.com shouldn't mistakenly be matched as a mention", scopes: ["source.gfm"]
  {tokens} = grammar.tokenizeLine("@person's")
  expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
  expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"]
  expect(tokens[2]).toEqual value: "'s", scopes: ["source.gfm"]
  {tokens} = grammar.tokenizeLine("@person;")
  expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
  expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"]
  expect(tokens[2]).toEqual value: ";", scopes: ["source.gfm"]
it "tokenizes issue numbers", ->
  # #123 issue references: the # is variable.issue.tag.gfm and the digits
  # string.issue.number.gfm. Like mentions, they need leading whitespace and
  # must not be followed by an alphanumeric character.

  # No whitespace before # -> not an issue reference.
  {tokens} = grammar.tokenizeLine("sentence with no space before#12 ")
  expect(tokens[0]).toEqual value: "sentence with no space before#12 ", scopes: ["source.gfm"]

  # Issue references surrounded by quotes, possessives and brackets.
  {tokens} = grammar.tokenizeLine(" #101 '#101' #101's #101. #101, (#101) [#101]")
  expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
  expect(tokens[2]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
  expect(tokens[3]).toEqual value: " '", scopes: ["source.gfm"]
  expect(tokens[4]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
  expect(tokens[5]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
  expect(tokens[6]).toEqual value: "' ", scopes: ["source.gfm"]
  expect(tokens[7]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
  expect(tokens[8]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
  expect(tokens[9]).toEqual value: "'s ", scopes: ["source.gfm"]
  expect(tokens[10]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
  expect(tokens[11]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
  expect(tokens[12]).toEqual value: ". ", scopes: ["source.gfm"]
  expect(tokens[13]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
  expect(tokens[14]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
  expect(tokens[15]).toEqual value: ", (", scopes: ["source.gfm"]
  expect(tokens[16]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
  expect(tokens[17]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
  expect(tokens[18]).toEqual value: ") [", scopes: ["source.gfm"]
  expect(tokens[19]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
  expect(tokens[20]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
  expect(tokens[21]).toEqual value: "]", scopes: ["source.gfm"]
  {tokens} = grammar.tokenizeLine('"#101"')
  expect(tokens[0]).toEqual value: '"', scopes: ["source.gfm"]
  expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
  expect(tokens[2]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
  expect(tokens[3]).toEqual value: '"', scopes: ["source.gfm"]

  # A letter directly after the digits invalidates the reference.
  {tokens} = grammar.tokenizeLine("sentence with a space before #123i and a character after")
  expect(tokens[0]).toEqual value: "sentence with a space before #123i and a character after", scopes: ["source.gfm"]
  {tokens} = grammar.tokenizeLine("sentence with a space before #123 that continues")
  expect(tokens[0]).toEqual value: "sentence with a space before ", scopes: ["source.gfm"]
  expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
  expect(tokens[2]).toEqual value: "123", scopes: ["source.gfm", "string.issue.number.gfm"]
  expect(tokens[3]).toEqual value: " that continues", scopes: ["source.gfm"]
  {tokens} = grammar.tokenizeLine(" #123's")
  expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
  expect(tokens[2]).toEqual value: "123", scopes: ["source.gfm", "string.issue.number.gfm"]
  expect(tokens[3]).toEqual value: "'s", scopes: ["source.gfm"]
it "tokenizes unordered lists", ->
  # *, + and - each act as bullets when followed by a space; a bullet glued
  # to its text is not a list marker.
  {tokens} = grammar.tokenizeLine("*Item 1")
  expect(tokens[0]).not.toEqual value: "*Item 1", scopes: ["source.gfm", "variable.unordered.list.gfm"]
  {tokens} = grammar.tokenizeLine("  * Item 1")
  expect(tokens[0]).toEqual value: "  ", scopes: ["source.gfm"]
  expect(tokens[1]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
  expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
  expect(tokens[3]).toEqual value: "Item 1", scopes: ["source.gfm"]
  {tokens} = grammar.tokenizeLine("  + Item 2")
  expect(tokens[0]).toEqual value: "  ", scopes: ["source.gfm"]
  expect(tokens[1]).toEqual value: "+", scopes: ["source.gfm", "variable.unordered.list.gfm"]
  expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
  expect(tokens[3]).toEqual value: "Item 2", scopes: ["source.gfm"]
  {tokens} = grammar.tokenizeLine("  - Item 3")
  expect(tokens[0]).toEqual value: "  ", scopes: ["source.gfm"]
  expect(tokens[1]).toEqual value: "-", scopes: ["source.gfm", "variable.unordered.list.gfm"]
  expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
  expect(tokens[3]).toEqual value: "Item 3", scopes: ["source.gfm"]
it "tokenizes ordered lists", ->
  # "N." markers of any digit count are ordered-list bullets when followed
  # by a space; a number glued to its text is plain prose.
  {tokens} = grammar.tokenizeLine("1.First Item")
  expect(tokens[0]).toEqual value: "1.First Item", scopes: ["source.gfm"]
  {tokens} = grammar.tokenizeLine("  1. First Item")
  expect(tokens[0]).toEqual value: "  ", scopes: ["source.gfm"]
  expect(tokens[1]).toEqual value: "1.", scopes: ["source.gfm", "variable.ordered.list.gfm"]
  expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
  expect(tokens[3]).toEqual value: "First Item", scopes: ["source.gfm"]
  {tokens} = grammar.tokenizeLine("  10. Tenth Item")
  expect(tokens[0]).toEqual value: "  ", scopes: ["source.gfm"]
  expect(tokens[1]).toEqual value: "10.", scopes: ["source.gfm", "variable.ordered.list.gfm"]
  expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
  expect(tokens[3]).toEqual value: "Tenth Item", scopes: ["source.gfm"]
  {tokens} = grammar.tokenizeLine("  111. Hundred and eleventh item")
  expect(tokens[0]).toEqual value: "  ", scopes: ["source.gfm"]
  expect(tokens[1]).toEqual value: "111.", scopes: ["source.gfm", "variable.ordered.list.gfm"]
  expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
  expect(tokens[3]).toEqual value: "Hundred and eleventh item", scopes: ["source.gfm"]
it "tokenizes > quoted text", ->
  # Block quotes: the ">" marker gets the support.quote scope and the body
  # keeps the comment.quote scope.
  quoted = grammar.tokenizeLine("> Quotation :+1:").tokens
  expect(quoted[0]).toEqual({value: ">", scopes: ["source.gfm", "comment.quote.gfm", "support.quote.gfm"]})
  expect(quoted[1]).toEqual({value: " Quotation :+1:", scopes: ["source.gfm", "comment.quote.gfm"]})
it "tokenizes HTML entities", ->
  # Named (&trade;), decimal (&#8482;) and hex (&#xb3;) entities are all
  # recognised; "&" and ";" are punctuation, the name/number is the entity.
  # Note &a1; is matched too — the pattern does not validate entity names.
  {tokens} = grammar.tokenizeLine("&trade; &#8482; &a1; &#xb3;")
  expect(tokens[0]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[1]).toEqual value: "trade", scopes: ["source.gfm", "constant.character.entity.gfm"]
  expect(tokens[2]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[3]).toEqual value: " ", scopes: ["source.gfm"]
  expect(tokens[4]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[5]).toEqual value: "#8482", scopes: ["source.gfm", "constant.character.entity.gfm"]
  expect(tokens[6]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[7]).toEqual value: " ", scopes: ["source.gfm"]
  expect(tokens[8]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[9]).toEqual value: "a1", scopes: ["source.gfm", "constant.character.entity.gfm"]
  expect(tokens[10]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[11]).toEqual value: " ", scopes: ["source.gfm"]
  expect(tokens[12]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[13]).toEqual value: "#xb3", scopes: ["source.gfm", "constant.character.entity.gfm"]
  expect(tokens[14]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
it "tokenizes HTML entities in *italic* text", ->
  # Entities nested inside italic spans keep their entity scopes under
  # markup.italic.gfm; both the * and _ italic delimiters are covered.
  {tokens} = grammar.tokenizeLine("*&trade; &#8482; &#xb3;*")
  expect(tokens[0]).toEqual value: "*", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ]
  expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
  expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
  expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[12]).toEqual value: "*", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ]
  # Same expectations with underscore-delimited italics.
  {tokens} = grammar.tokenizeLine("_&trade; &#8482; &#xb3;_")
  expect(tokens[0]).toEqual value: "_", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ]
  expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
  expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
  expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[12]).toEqual value: "_", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ]
it "tokenizes HTML entities in **bold** text", ->
  # Scope stacks shared by every entity token inside bold text.
  boldScopes = ["source.gfm", "markup.bold.gfm"]
  entityScopes = boldScopes.concat ["constant.character.entity.gfm"]
  entityPunctScopes = entityScopes.concat ["punctuation.definition.entity.gfm"]
  markerScopes = boldScopes.concat ["punctuation.definition.entity.gfm"]
  # Both bold delimiters (** and __) must tokenize the entities identically.
  for marker in ["**", "__"]
    {tokens} = grammar.tokenizeLine("#{marker}&trade; &#8482; &#xb3;#{marker}")
    expect(tokens[0]).toEqual value: marker, scopes: markerScopes
    expect(tokens[1]).toEqual value: "&", scopes: entityPunctScopes
    expect(tokens[2]).toEqual value: "trade", scopes: entityScopes
    expect(tokens[3]).toEqual value: ";", scopes: entityPunctScopes
    expect(tokens[4]).toEqual value: " ", scopes: boldScopes
    expect(tokens[5]).toEqual value: "&", scopes: entityPunctScopes
    expect(tokens[6]).toEqual value: "#8482", scopes: entityScopes
    expect(tokens[7]).toEqual value: ";", scopes: entityPunctScopes
    expect(tokens[8]).toEqual value: " ", scopes: boldScopes
    expect(tokens[9]).toEqual value: "&", scopes: entityPunctScopes
    expect(tokens[10]).toEqual value: "#xb3", scopes: entityScopes
    expect(tokens[11]).toEqual value: ";", scopes: entityPunctScopes
    expect(tokens[12]).toEqual value: marker, scopes: markerScopes
it "tokenizes HTML entities in ***bold italic*** text", ->
  baseScopes = ["source.gfm", "markup.bold.italic.gfm"]
  entityScopes = baseScopes.concat ["constant.character.entity.gfm"]
  entityPunctScopes = entityScopes.concat ["punctuation.definition.entity.gfm"]
  # NOTE(review): unlike the bold/strike specs, here the entity *name* carries
  # the punctuation scope while ";" carries only the entity scope, and the
  # delimiters carry no punctuation scope. Preserved exactly as specified.
  for marker in ["***", "___"]
    {tokens} = grammar.tokenizeLine("#{marker}&trade; &#8482; &#xb3;#{marker}")
    expect(tokens[0]).toEqual value: marker, scopes: baseScopes
    expect(tokens[1]).toEqual value: "&", scopes: entityPunctScopes
    expect(tokens[2]).toEqual value: "trade", scopes: entityPunctScopes
    expect(tokens[3]).toEqual value: ";", scopes: entityScopes
    expect(tokens[4]).toEqual value: " ", scopes: baseScopes
    expect(tokens[5]).toEqual value: "&", scopes: entityPunctScopes
    expect(tokens[6]).toEqual value: "#8482", scopes: entityPunctScopes
    expect(tokens[7]).toEqual value: ";", scopes: entityScopes
    expect(tokens[8]).toEqual value: " ", scopes: baseScopes
    expect(tokens[9]).toEqual value: "&", scopes: entityPunctScopes
    expect(tokens[10]).toEqual value: "#xb3", scopes: entityPunctScopes
    expect(tokens[11]).toEqual value: ";", scopes: entityScopes
    expect(tokens[12]).toEqual value: marker, scopes: baseScopes
it "tokenizes HTML entities in strikethrough text", ->
  strikeScopes = ["source.gfm", "markup.strike.gfm"]
  entityScopes = strikeScopes.concat ["constant.character.entity.gfm"]
  entityPunctScopes = entityScopes.concat ["punctuation.definition.entity.gfm"]
  {tokens} = grammar.tokenizeLine("~~&trade; &#8482; &#xb3;~~")
  # The ~~ delimiters carry only the strike scope (no punctuation scope).
  expect(tokens[0]).toEqual value: "~~", scopes: strikeScopes
  expect(tokens[1]).toEqual value: "&", scopes: entityPunctScopes
  expect(tokens[2]).toEqual value: "trade", scopes: entityScopes
  expect(tokens[3]).toEqual value: ";", scopes: entityPunctScopes
  expect(tokens[4]).toEqual value: " ", scopes: strikeScopes
  expect(tokens[5]).toEqual value: "&", scopes: entityPunctScopes
  expect(tokens[6]).toEqual value: "#8482", scopes: entityScopes
  expect(tokens[7]).toEqual value: ";", scopes: entityPunctScopes
  expect(tokens[8]).toEqual value: " ", scopes: strikeScopes
  expect(tokens[9]).toEqual value: "&", scopes: entityPunctScopes
  expect(tokens[10]).toEqual value: "#xb3", scopes: entityScopes
  expect(tokens[11]).toEqual value: ";", scopes: entityPunctScopes
  expect(tokens[12]).toEqual value: "~~", scopes: strikeScopes
it "tokenizes HTML comments", ->
  # The <!-- --> delimiters get an extra punctuation scope; the body does not.
  {tokens} = grammar.tokenizeLine("<!-- a comment -->")
  commentScopes = ["source.gfm", "comment.block.gfm"]
  delimiterScopes = commentScopes.concat ["punctuation.definition.comment.gfm"]
  expect(tokens[0]).toEqual value: "<!--", scopes: delimiterScopes
  expect(tokens[1]).toEqual value: " a comment ", scopes: commentScopes
  expect(tokens[2]).toEqual value: "-->", scopes: delimiterScopes
it "tokenizes YAML front matter", ->
  # The --- fences share a horizontal-rule comment scope; the body keeps only
  # the front-matter scope.
  fenceScopes = ["source.gfm", "front-matter.yaml.gfm", "comment.hr.gfm"]
  [openingFence, bodyLine, closingFence] = grammar.tokenizeLines """
    ---
    front: matter
    ---
  """
  expect(openingFence[0]).toEqual value: "---", scopes: fenceScopes
  expect(bodyLine[0]).toEqual value: "front: matter", scopes: ["source.gfm", "front-matter.yaml.gfm"]
  expect(closingFence[0]).toEqual value: "---", scopes: fenceScopes
it "tokenizes linebreaks", ->
  # Trailing whitespace at the end of a line is scoped as a hard line break.
  # NOTE(review): GFM hard breaks conventionally require two trailing spaces;
  # the literals below show a single space — confirm trailing whitespace was
  # not lost in transit before relying on this expectation.
  {tokens} = grammar.tokenizeLine("line ")
  expect(tokens[0]).toEqual value: "line", scopes: ["source.gfm"]
  expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "linebreak.gfm"]
it "tokenizes tables", ->
  # Shared scope stacks for table structure tokens.
  tableScopes = ["source.gfm", "table.gfm"]
  outerPipeScopes = tableScopes.concat ["border.pipe.outer"]
  innerPipeScopes = tableScopes.concat ["border.pipe.inner"]
  alignmentScopes = tableScopes.concat ["border.alignment"]
  headerBorderScopes = tableScopes.concat ["border.header"]

  [headerTokens, alignTokens, contentTokens] = grammar.tokenizeLines """
    | Column 1 | Column 2 |
    |:----------|:---------:|
    | Content 1 | Content 2 |
  """
  # Header line: outer pipes frame the row, inner pipes separate cells.
  expect(headerTokens[0]).toEqual value: "|", scopes: outerPipeScopes
  expect(headerTokens[1]).toEqual value: " Column 1 ", scopes: tableScopes
  expect(headerTokens[2]).toEqual value: "|", scopes: innerPipeScopes
  expect(headerTokens[3]).toEqual value: " Column 2 ", scopes: tableScopes
  expect(headerTokens[4]).toEqual value: "|", scopes: outerPipeScopes
  # Alignment line: colons mark alignment, dashes are the header border.
  expect(alignTokens[0]).toEqual value: "|", scopes: outerPipeScopes
  expect(alignTokens[1]).toEqual value: ":", scopes: alignmentScopes
  expect(alignTokens[2]).toEqual value: "----------", scopes: headerBorderScopes
  expect(alignTokens[3]).toEqual value: "|", scopes: innerPipeScopes
  expect(alignTokens[4]).toEqual value: ":", scopes: alignmentScopes
  expect(alignTokens[5]).toEqual value: "---------", scopes: headerBorderScopes
  expect(alignTokens[6]).toEqual value: ":", scopes: alignmentScopes
  expect(alignTokens[7]).toEqual value: "|", scopes: outerPipeScopes
  # Content line
  expect(contentTokens[0]).toEqual value: "|", scopes: outerPipeScopes
  expect(contentTokens[1]).toEqual value: " Content 1 ", scopes: tableScopes
  expect(contentTokens[2]).toEqual value: "|", scopes: innerPipeScopes
  expect(contentTokens[3]).toEqual value: " Content 2 ", scopes: tableScopes
  expect(contentTokens[4]).toEqual value: "|", scopes: outerPipeScopes

  # FIX: restore the blank line between the table row and the heading. The
  # destructuring expects three lines ([headerTokens, emptyLineTokens,
  # headingTokens]); without the blank line, tokenizeLines yields only two
  # arrays, headingTokens is undefined, and headingTokens[0] below throws.
  [headerTokens, emptyLineTokens, headingTokens] = grammar.tokenizeLines """
    | Column 1 | Column 2\t

    # Heading
  """
  expect(headerTokens[0]).toEqual value: "|", scopes: outerPipeScopes
  expect(headerTokens[1]).toEqual value: " Column 1 ", scopes: tableScopes
  expect(headerTokens[2]).toEqual value: "|", scopes: innerPipeScopes
  expect(headerTokens[3]).toEqual value: " Column 2", scopes: tableScopes
  expect(headerTokens[4]).toEqual value: "\t", scopes: tableScopes
  # The heading after the blank line tokenizes as a normal level-1 heading.
  expect(headingTokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"]
  expect(headingTokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"]
  expect(headingTokens[2]).toEqual value: "Heading", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]
it "tokenizes criticmarkup", ->
  [additionTokens, deletionTokens, highlightTokens, substitutionTokens] = grammar.tokenizeLines """
    Add{++ some text++}
    Delete{-- some text--}
    Highlight {==some text==}{>>with comment<<}
    Replace {~~this~>by that~~}
  """
  # Scope stacks for each CriticMarkup construct and its delimiter markers.
  insertedScopes = ["source.gfm", "markup.inserted.critic.gfm.addition"]
  insertedMarkerScopes = insertedScopes.concat ["punctuation.definition.inserted.critic.gfm.addition.marker"]
  deletedScopes = ["source.gfm", "markup.deleted.critic.gfm.deletion"]
  deletedMarkerScopes = deletedScopes.concat ["punctuation.definition.deleted.critic.gfm.deletion.marker"]
  highlightScopes = ["source.gfm", "critic.gfm.highlight"]
  highlightMarkerScopes = highlightScopes.concat ["critic.gfm.highlight.marker"]
  commentScopes = ["source.gfm", "critic.gfm.comment"]
  commentMarkerScopes = commentScopes.concat ["critic.gfm.comment.marker"]
  changedScopes = ["source.gfm", "markup.changed.critic.gfm.substitution"]
  changedMarkerScopes = changedScopes.concat ["punctuation.definition.changed.critic.gfm.substitution.marker"]
  changedOperatorScopes = changedScopes.concat ["punctuation.definition.changed.critic.gfm.substitution.operator"]
  # Addition: {++ ... ++}
  expect(additionTokens[0]).toEqual value: "Add", scopes: ["source.gfm"]
  expect(additionTokens[1]).toEqual value: "{++", scopes: insertedMarkerScopes
  expect(additionTokens[2]).toEqual value: " some text", scopes: insertedScopes
  expect(additionTokens[3]).toEqual value: "++}", scopes: insertedMarkerScopes
  # Deletion: {-- ... --}
  expect(deletionTokens[0]).toEqual value: "Delete", scopes: ["source.gfm"]
  expect(deletionTokens[1]).toEqual value: "{--", scopes: deletedMarkerScopes
  expect(deletionTokens[2]).toEqual value: " some text", scopes: deletedScopes
  expect(deletionTokens[3]).toEqual value: "--}", scopes: deletedMarkerScopes
  # Highlight {== ... ==} immediately followed by a comment {>> ... <<}
  expect(highlightTokens[0]).toEqual value: "Highlight ", scopes: ["source.gfm"]
  expect(highlightTokens[1]).toEqual value: "{==", scopes: highlightMarkerScopes
  expect(highlightTokens[2]).toEqual value: "some text", scopes: highlightScopes
  expect(highlightTokens[3]).toEqual value: "==}", scopes: highlightMarkerScopes
  expect(highlightTokens[4]).toEqual value: "{>>", scopes: commentMarkerScopes
  expect(highlightTokens[5]).toEqual value: "with comment", scopes: commentScopes
  expect(highlightTokens[6]).toEqual value: "<<}", scopes: commentMarkerScopes
  # Substitution: {~~ old ~> new ~~}
  expect(substitutionTokens[0]).toEqual value: "Replace ", scopes: ["source.gfm"]
  expect(substitutionTokens[1]).toEqual value: "{~~", scopes: changedMarkerScopes
  expect(substitutionTokens[2]).toEqual value: "this", scopes: changedScopes
  expect(substitutionTokens[3]).toEqual value: "~>", scopes: changedOperatorScopes
  expect(substitutionTokens[4]).toEqual value: "by that", scopes: changedScopes
  expect(substitutionTokens[5]).toEqual value: "~~}", scopes: changedMarkerScopes

Some files were not shown because too many files have changed in this diff Show More