Mirror of https://github.com/github/semantic.git (synced 2025-01-05 22:28:10 +03:00)

Commit 8d62ad3b91: Merge remote-tracking branch 'origin/master' into codegen-cleanup

15  .github/workflows/haskell.yml  vendored
@@ -31,19 +31,19 @@ jobs:
name: Cache ~/.cabal/packages
with:
path: ~/.cabal/packages
key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-packages
key: ${{ runner.os }}-${{ matrix.ghc }}-v1-cabal-packages

- uses: actions/cache@v1
name: Cache ~/.cabal/store
with:
path: ~/.cabal/store
key: ${{ runner.os }}-${{ matrix.ghc }}-v10-cabal-store
key: ${{ runner.os }}-${{ matrix.ghc }}-v11-cabal-store

- uses: actions/cache@v1
name: Cache dist-newstyle
with:
path: dist-newstyle
key: ${{ runner.os }}-${{ matrix.ghc }}-${{ matrix.cabal }}-semantic-dist
key: ${{ runner.os }}-${{ matrix.ghc }}-${{ matrix.cabal }}-v1-semantic-dist

# - name: hlint
# run: |
@@ -52,7 +52,7 @@ jobs:

- name: Install dependencies
run: |
script/bootstrap
cabal v2-update
cabal v2-configure --project-file=cabal.project.ci --disable-optimization --enable-benchmarks --enable-tests --write-ghc-environment-files=always -j2
cabal v2-build --project-file=cabal.project.ci all --only-dependencies

@@ -61,7 +61,14 @@ jobs:
cabal v2-build --project-file=cabal.project.ci
cabal v2-run --project-file=cabal.project.ci semantic:test
cabal v2-run --project-file=cabal.project.ci semantic-core:test
cabal v2-run --project-file=cabal.project.ci semantic-go:test
cabal v2-run --project-file=cabal.project.ci semantic-java:test
cabal v2-run --project-file=cabal.project.ci semantic-json:test
cabal v2-run --project-file=cabal.project.ci semantic-python:test
cabal v2-run --project-file=cabal.project.ci semantic-python:test:compiling
cabal v2-run --project-file=cabal.project.ci semantic-python:test:graphing
cabal v2-run --project-file=cabal.project.ci semantic-ruby:test
cabal v2-run --project-file=cabal.project.ci semantic-tsx:test
cabal v2-run --project-file=cabal.project.ci semantic-typescript:test
cd semantic-source; cabal v2-run --project-file=cabal.project.ci semantic-source:test; cd ..
cd semantic-source; cabal v2-run --project-file=cabal.project.ci semantic-source:doctest -- src; cd ..
@@ -16,6 +16,9 @@ RUN go get github.com/golang/protobuf/proto && \

COPY --from=haskell /root/.cabal/bin/proto-lens-protoc /usr/local/bin/proto-lens-protoc

# Bit of a hack so that proto-lens-protoc actually runs
COPY --from=haskell /opt/ghc/8.8.1/lib/ghc-8.8.1/* /opt/ghc/8.8.1/lib/ghc-8.8.1/

ENTRYPOINT ["/protobuf/bin/protoc", "-I/protobuf", "--plugin=protoc-gen-haskell=/usr/local/bin/proto-lens-protoc"]

# Build semantic
@@ -82,12 +82,13 @@ Available options:
| 3 | TypeScript | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | 🚧 | |
| 4 | Python | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | 🚧 | |
| 5 | Go | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | 🚧 | |
| | PHP | 🚧 | 🚧 | 🚧 | 🚧| 🚧 | | | |
| | Java | 🚧 | 🚧 | 🚧 | 🔶 | ✅ | | | |
| | JSON | ✅ | ✅ | ✅ | N/A | N/A | N/A | N/A| |
| | PHP | 🚧 | 🚧 | 🚧 | 🚧 | 🚧 | | | |
| | Java | 🚧 | N/A | 🚧 | 🚧 | ✅ | | | |
| | JSON | ✅ | N/A | ✅ | N/A | N/A | N/A | N/A| |
| | JSX | ✅ | ✅ | ✅ | 🔶 | | | | |
| | Haskell | 🚧 | 🚧 | 🚧 | 🔶 | 🚧 | | | |
| | Markdown | ✅ | ✅ | ✅ | 🔶 | N/A | N/A | N/A | |
| | Markdown | 🚧 | 🚧 | 🚧 | 🚧 | N/A | N/A | N/A | |
| | CodeQL | 🚧 | N/A | 🚧 | 🚧 | 🚧 | | | |

* ✅ — Supported
* 🔶 — Partial support
@@ -38,7 +38,7 @@ callGraphProject' :: ( Language.SLanguage lang
callGraphProject' session proxy path
| Just (SomeParser parser) <- parserForLanguage analysisParsers lang = fmap (bimap show (const ())) . runTask session $ do
blob <- readBlobFromPath (Path.toAbsRel path)
package <- fmap snd <$> runParse (Duration.fromSeconds 10) (parsePackage parser (Project (Path.toString (Path.takeDirectory path)) [blob] lang []))
package <- fmap snd <$> runParse (Duration.fromSeconds 10) (parsePackage parser (Project (Path.toAbsRel (Path.takeDirectory path)) [blob] lang []))
modules <- topologicalSort <$> runImportGraphToModules proxy package
runCallGraph proxy False modules package
| otherwise = error $ "Analysis not supported for: " <> show lang
@@ -28,8 +28,3 @@ source-repository-package
type: git
location: https://github.com/antitypical/fused-syntax.git
tag: d11e14581217590a5c67f79cbaeee35ac8acee6a

source-repository-package
type: git
location: https://github.com/fused-effects/fused-effects-readline.git
tag: 7a96949c77c73c6e5975c8d6171ffb63eb76b467

@@ -29,11 +29,6 @@ source-repository-package
location: https://github.com/antitypical/fused-syntax.git
tag: d11e14581217590a5c67f79cbaeee35ac8acee6a

source-repository-package
type: git
location: https://github.com/fused-effects/fused-effects-readline.git
tag: 7a96949c77c73c6e5975c8d6171ffb63eb76b467

-- Treat warnings as errors for CI builds
package semantic
119  docs/codegen.md  Normal file
@@ -0,0 +1,119 @@
# CodeGen Documentation

CodeGen is the process for auto-generating language-specific, strongly-typed ASTs to be used in [Semantic](https://github.com/github/semantic-code/blob/d9f91a05dc30a61b9ff8c536d75661d417f3c506/design-docs/precise-code-navigation.md).

### Prerequisites
To get started, first make sure your language has:

1. An existing [tree-sitter](http://tree-sitter.github.io/tree-sitter/) parser;
2. An existing Cabal package in [tree-sitter](http://tree-sitter.github.io/tree-sitter/) for said language, providing an interface into tree-sitter's C source. [Here](https://github.com/tree-sitter/haskell-tree-sitter/tree/master/tree-sitter-python) is an example of such a library for Python, the supported language that the rest of this documentation refers to.

### CodeGen Pipeline

During parser generation, tree-sitter produces a JSON file that captures the structure of a language's grammar. Based on this, we're able to derive datatypes representing surface languages, and then use those datatypes to generically build ASTs. This automates the engineering effort [historically required for adding a new language](https://github.com/github/semantic/blob/master/docs/adding-new-languages.md).

The following steps provide a high-level outline of the process:

1. [**Deserialize.**](https://github.com/github/semantic/blob/master/semantic-ast/src/AST/Deserialize.hs) First, we deserialize the `node-types.json` file for a given language into the desired shape of datatypes via the parsing capabilities afforded by the [Aeson](http://hackage.haskell.org/package/aeson) library. The node-types.json file takes on four distinct shapes: sums, products, named leaves, and anonymous leaves.
2. [**Generate Syntax.**](https://github.com/github/semantic/blob/master/semantic-ast/src/AST/GenerateSyntax.hs) We then use Template Haskell to auto-generate language-specific, strongly-typed datatypes that represent various language constructs. This API exports the top-level function `astDeclarationsForLanguage` to auto-generate datatypes at compile time, which is invoked by a given language's [AST](https://github.com/github/semantic/blob/master/semantic-python/src/Language/Python/AST.hs) module (a minimal sketch of such a module follows this list).
3. [**Unmarshal.**](https://github.com/github/semantic/blob/master/semantic-ast/src/AST/Unmarshal.hs) Unmarshaling is the process of iterating over tree-sitter’s parse trees using its tree cursor API, and producing Haskell ASTs for the relevant nodes. We parse source code with tree-sitter and unmarshal the data we get back to build these ASTs generically. This file exports the top-level function `parseByteString`, which takes source code and a language as arguments, and produces an AST.
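Concretely, a per-language AST module is little more than this Template Haskell splice. The following is a minimal sketch modelled on the `Language.Go.AST` and `Language.Python.AST` modules touched elsewhere in this commit; the real modules enable several additional language extensions and hide clashing `Prelude` names.

```Haskell
{-# LANGUAGE TemplateHaskell #-}
module Language.Python.AST
  ( module Language.Python.AST
  ) where

import AST.GenerateSyntax
import Language.Haskell.TH.Syntax (runIO)
import qualified TreeSitter.Python as Python (getNodeTypesPath, tree_sitter_python)

-- Locate node-types.json at compile time and splice in one datatype per node type.
runIO Python.getNodeTypesPath >>= astDeclarationsForLanguage Python.tree_sitter_python
```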
Here is an example that describes the relationship between a Python identifier represented in the tree-sitter generated JSON file and the datatype generated by Template Haskell based on the provided JSON:

| Type | JSON | TH-generated code |
|----------|--------------|------------|
|Named leaf|<code>{<br>"type": "identifier",<br>"named": true<br>}|<code>data TreeSitter.Python.AST.Identifier a<br>= TreeSitter.Python.AST.Identifier {TreeSitter.Python.AST.ann :: a,<br>TreeSitter.Python.AST.bytes :: text-1.2.3.1:Data.Text.Internal.Text} -- Defined at TreeSitter/Python/AST.hs:10:1<br>instance Show a => Show (TreeSitter.Python.AST.Identifier a) -- Defined at TreeSitter/Python/AST.hs:10:1<br>instance Ord a => Ord (TreeSitter.Python.AST.Identifier a) -- Defined at TreeSitter/Python/AST.hs:10:1<br>instance Eq a => Eq (TreeSitter.Python.AST.Identifier a) -- Defined at TreeSitter/Python/AST.hs:10:1<br>instance Traversable TreeSitter.Python.AST.Identifier -- Defined at TreeSitter/Python/AST.hs:10:1<br>instance Functor TreeSitter.Python.AST.Identifier -- Defined at TreeSitter/Python/AST.hs:10:1<br>instance Foldable TreeSitter.Python.AST.Identifier -- Defined at TreeSitter/Python/AST.hs:10:1<br>instance Unmarshal TreeSitter.Python.AST.Identifier -- Defined at TreeSitter/Python/AST.hs:10:1<br>instance SymbolMatching TreeSitter.Python.AST.Identifier -- Defined at TreeSitter/Python/AST.hs:10:1|

The remainder of this document provides more detail on generating ASTs, inspecting datatypes, and tests, along with notes on decisions pertaining to the relevant APIs.
___

### Table of Contents
- [CodeGen Documentation](#codegen-documentation)
- [Prerequisites](#prerequisites)
- [CodeGen Pipeline](#codegen-pipeline)
- [Table of Contents](#table-of-contents)
- [Generating ASTs](#generating-asts)
- [Inspecting auto-generated datatypes](#inspecting-auto-generated-datatypes)
- [Tests](#tests)
- [Additional notes](#additional-notes)
___
### Generating ASTs

To parse source code and produce ASTs locally:

1. Load the REPL for a given language package:

```
cabal new-repl lib:semantic-python
```

2. Set the language extensions `OverloadedStrings` and `TypeApplications`, and import the relevant modules `AST.Unmarshal`, `Source.Range` and `Source.Span`:

```
:seti -XOverloadedStrings
:seti -XTypeApplications

import Source.Span
import Source.Range
import AST.Unmarshal
```

3. You can now call `parseByteString`, passing in the language you wish to parse (in this case Python, given by the argument `Language.Python.Grammar.tree_sitter_python`) and the source code (in this case the integer `1`). Since the function is constrained by `(Unmarshal t, UnmarshalAnn a)`, you can use type applications to provide a top-level node `t`, an entry point into the tree, in addition to a polymorphic annotation `a` used to represent range and span. In this case, that top-level root node is `Module`, and the annotation is given by `Span` and `Range` as defined in the [semantic-source](https://github.com/github/semantic/tree/master/semantic-source/src/Source) package:

```
parseByteString @Language.Python.AST.Module @(Source.Span.Span, Source.Range.Range) Language.Python.Grammar.tree_sitter_python "1"
```

This generates the following AST:

```
Right (Module {ann = (Span {start = Pos {line = 0, column = 0}, end = Pos {line = 0, column = 1}},Range {start = 0, end = 1}), extraChildren = [R1 (SimpleStatement {getSimpleStatement = L1 (R1 (R1 (L1 (ExpressionStatement {ann = (Span {start = Pos {line = 0, column = 0}, end = Pos {line = 0, column = 1}},Range {start = 0, end = 1}), extraChildren = L1 (L1 (Expression {getExpression = L1 (L1 (L1 (PrimaryExpression {getPrimaryExpression = R1 (L1 (L1 (L1 (Integer {ann = (Span {start = Pos {line = 0, column = 0}, end = Pos {line = 0, column = 1}},Range {start = 0, end = 1}), text = "1"}))))})))})) :| []}))))})]})
```
### Inspecting auto-generated datatypes

Datatypes are derived from a language and its `node-types.json` file using the `GenerateSyntax` API. These datatypes can be viewed in the REPL just as they would be for any other datatype, using `:i` after loading the language-specific `AST.hs` module for a given language.

```
:l semantic-python/src/Language/Python/AST.hs
Ok, six modules loaded.
*Language.Python.AST Source.Span Source.Range> :i Module
```

This shows us the auto-generated `Module` datatype:

```Haskell
data Module a
  = Module {Language.Python.AST.ann :: a,
            Language.Python.AST.extraChildren :: [(GHC.Generics.:+:)
                                                    CompoundStatement SimpleStatement a]}
  -- Defined at /Users/aymannadeem/github/semantic/semantic-python/src/Language/Python/AST.hs:23:1
instance Show a => Show (Module a)
  -- Defined at /Users/aymannadeem/github/semantic/semantic-python/src/Language/Python/AST.hs:23:1
instance Ord a => Ord (Module a)
  -- Defined at /Users/aymannadeem/github/semantic/semantic-python/src/Language/Python/AST.hs:23:1
instance Eq a => Eq (Module a)
  -- Defined at /Users/aymannadeem/github/semantic/semantic-python/src/Language/Python/AST.hs:23:1
instance Traversable Module
  -- Defined at /Users/aymannadeem/github/semantic/semantic-python/src/Language/Python/AST.hs:23:1
instance Functor Module
  -- Defined at /Users/aymannadeem/github/semantic/semantic-python/src/Language/Python/AST.hs:23:1
instance Foldable Module
  -- Defined at /Users/aymannadeem/github/semantic/semantic-python/src/Language/Python/AST.hs:23:1
```
### Tests

As of right now, Hedgehog tests are minimal and only in place for the Python library.

To run tests:

`cabal v2-test semantic-python`
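The per-language test suites added in this commit all follow the same shape: locate the upstream tree-sitter corpus, parse every example with the generated root datatype, and hand the results to tasty. The following is a minimal sketch modelled on the semantic-go and semantic-json `Test.hs` mains in this change; the `testGroup` label and the choice of `Module` as the parsed root are illustrative.

```Haskell
module Main (main) where

import AST.TestHelpers
import AST.Unmarshal
import qualified Language.Python.AST as Py
import Language.Python.Grammar
import qualified System.Path as Path
import Test.Tasty

main :: IO ()
main
  =   Path.absDir <$> Py.getTestCorpusDir   -- corpus directory shipped with the grammar package
  >>= readCorpusFiles'                      -- glob the **/*.txt corpus examples
  >>= traverse (testCorpus parse)           -- one TestTree per corpus file
  >>= defaultMain . tests
  where
    parse = parseByteString @Py.Module @() tree_sitter_python
    tests = testGroup "tree-sitter-python corpus"
```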
### Additional notes

- [GenerateSyntax](https://github.com/tree-sitter/haskell-tree-sitter/blob/master/tree-sitter/src/TreeSitter/GenerateSyntax.hs) provides a way to pre-define certain datatypes for which Template Haskell is not used. Any datatypes among the node types which have already been defined in the module where the splice is run will be skipped, allowing customization of the representation of parts of the tree. While this gives us flexibility, we encourage using it sparingly, as it imposes an extra maintenance burden, particularly when the grammar is changed. This may be used to, e.g., parse literals into Haskell equivalents (such as parsing the textual contents of integer literals into `Integer`s), and may require defining `TS.UnmarshalAnn` or `TS.SymbolMatching` instances for (parts of) the custom datatypes, depending on where and how the datatype occurs in the generated tree, in addition to the usual `Foldable`, `Functor`, etc. instances provided for generated datatypes (see the sketch after this list).
- Annotations are captured by a polymorphic parameter `a`.
- [Unmarshal](https://github.com/tree-sitter/haskell-tree-sitter/blob/master/tree-sitter/src/TreeSitter/Unmarshal.hs) defines both generic and non-generic classes. This is because the generic behaviors are different from what we get non-generically, and in the case of `Maybe` and `[]` we actually prefer doing things non-generically. Since `[]` is a sum, the generic behavior for `:+:` would be invoked and would expect repetitions to be represented in the parse tree as right-nested singly-linked lists (e.g. `(a (b (c (d…))))`) rather than as consecutive sibling nodes (e.g. `(a b c ...d)`, which is what our trees have). We want to match the latter.
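As a rough illustration of the first note above (the field names below are hypothetical, not taken from this repository): a datatype declared in the AST module ahead of the splice takes the place of the one Template Haskell would otherwise generate for that node type.

```Haskell
-- Hand-written representation for the "integer" node type. Because it is already
-- in scope when the astDeclarationsForLanguage splice runs, no generated Integer
-- datatype is produced for that node type, and instances such as TS.UnmarshalAnn
-- or TS.SymbolMatching may then need to be supplied by hand, as described above.
data Integer a = Integer { ann :: a, intValue :: Prelude.Integer }
  deriving (Eq, Ord, Show, Functor, Foldable, Traversable)
```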
@@ -27,6 +27,14 @@ message ParseTreeGraphResponse {
repeated ParseTreeFileGraph files = 1;
}

message StackGraphRequest {
repeated Blob blobs = 1;
}

message StackGraphResponse {
repeated StackGraphFile files = 1;
}

message ParseTreeFileGraph {
string path = 1;
string language = 2;
@@ -174,3 +182,37 @@ message Span {
Position start = 1;
Position end = 2;
}

message StackGraphFile {
string path = 1;
string language = 2;
repeated StackGraphNode nodes = 3;
repeated StackGraphPath paths = 4;
repeated ParseError errors = 5;
}

message StackGraphNode {
int64 id = 1;
string name = 2;
string line = 3;
string kind = 4;
Span span = 5;
enum NodeType {
ROOT_SCOPE = 0;
JUMP_TO_SCOPE = 1;
EXPORTED_SCOPE = 2;
DEFINITION = 3;
REFERENCE = 4;
}
NodeType node_type = 6;
}

message StackGraphPath {
repeated string starting_symbol_stack = 1;
int64 starting_scope_stack_size = 2;
int64 from = 3;
string edges = 4;
int64 to = 5;
repeated int64 ending_scope_stack = 6;
repeated string ending_symbol_stack = 7;
}
@@ -1,4 +1,3 @@
#!/bin/bash
#!/usr/bin/env bash

git submodule sync --recursive && git submodule update --init --recursive --force
cabal v2-update
106  script/build-and-upload  Executable file
@@ -0,0 +1,106 @@
#!/usr/bin/env bash

# Usage: script/build-and-upload PROJECT_NAME
# where PROJECT_NAME is one of the packages present in this repo:
# semantic-source, etc.

set -e

PROJECT="$1"
ROOT_DIR="$(dirname "$0")/.."
CABAL_PATH="$ROOT_DIR/$PROJECT/$PROJECT.cabal"

if [ -z "$PROJECT" ]
then echo "USAGE: build_and_upload PROJECT_NAME"; exit 1
fi

if [ ! -f "$CABAL_PATH" ]
then echo "Couldn't find .cabal file at $CABAL_PATH; is $PROJECT a valid package?"; exit 1
fi

set -x

cabal v2-build "$PROJECT"
TGZ_LOC="$(cabal v2-sdist "$PROJECT" | tail -n 1)"
DOCS_LOC="$(cabal v2-haddock --haddock-for-hackage "$PROJECT" | tail -n 1)"
PACKAGE_VERSION="$(basename "$TGZ_LOC" .tar.gz)"

if [ ! -f "$TGZ_LOC" ]
then echo "Bug in build_and_upload: $TGZ_LOC doesn't point to a valid path"; exit 1
fi

set +x

echo "You are planning to upload '$PACKAGE_VERSION'."
read -rp "Is this correct? [y/n] " choice
if [ "$choice" != "y" ]
then echo "Aborting."; exit 1
fi

echo "Attempting to build $PACKAGE_VERSION from source"
TEMP_PATH=$(mktemp -d)
tar -xvf "$TGZ_LOC" -C "$TEMP_PATH"

set -x
(
  cd "$TEMP_PATH/$PACKAGE_VERSION"
  pwd

  cabal v2-update
  cabal v2-build --disable-optimization
)
set +x

if wget -q --spider "https://hackage.haskell.org/package/$PACKAGE_VERSION"
then
  echo "The package $PACKAGE_VERSION already exists on Hackage."
  echo "If you need to upload code changes, then bump the version number in $PROJECT/$PROJECT.cabal, make a PR, and run this script again."
  echo "Otherwise, if you need _only_ to loosen existing constraints in $PROJECT.cabal file, then you can create a new revision of this package on Hackage."
  echo "You'll need to make your changes by hand. Be sure to click the 'Review changes' button to check your work."
  read -rp "Do you want to open a browser so as to do this? [y/N]" choice
  if [ "$choice" == "y" ]
  then
    echo "Opening…"
    sleep 1
    open "https://hackage.haskell.org/package/$PACKAGE_VERSION/$PROJECT.cabal/edit"
    exit 0
  else
    echo "Aborting"
    exit 1
  fi
fi

echo "******************"
echo "Uploading packages"
echo "******************"

echo -n "Hackage username: "
read HACKAGE_USER
echo
echo -n "Hackage password: "
read -s HACKAGE_PASS

cabal upload --username="$HACKAGE_USER" --password="$HACKAGE_PASS" "$TGZ_LOC"
cabal upload --username="$HACKAGE_USER" --password="$HACKAGE_PASS" --documentation "$DOCS_LOC"

URL="https://hackage.haskell.org/package/$PACKAGE_VERSION/candidate"

echo "Opening candidate URL in browser…"
sleep 1
open "$URL"

echo "About to upload final version. Do you want to proceed?"
echo "Full-fledged package uploads cannot be undone!"
read -rp "Type 'yes' to continue. " choice
if [ "$choice" != "yes" ]
then echo "Aborting."; exit 1
fi

set -x

cabal upload --username="$HACKAGE_USER" --password="$HACKAGE_PASS" --publish "$TGZ_LOC"
cabal upload --username="$HACKAGE_USER" --password="$HACKAGE_PASS" --publish --documentation "$DOCS_LOC"

echo "Tagging $PACKAGE_VERSION"
git tag "$PACKAGE_VERSION"
git push --tags
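For example, releasing the semantic-source package named in the usage comment looks like this (the confirmation prompts and Hackage credentials are requested interactively):

```
script/build-and-upload semantic-source
```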
@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
#/ Usage: script/clone-example-repos
#/
#/ Clone some example repositories for smoke testing parsing, assignment, and precise ASTs.
@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash

store_dir="$HOME/.cabal/store/ghc-$(ghc --numeric-version)"
@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
#/ Usage: script/generate-example fileA fileB
#/        script/generate-example directory
#/
@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# Computes the flags for ghcide to pass to ghci. You probably won’t be running this yourself, but rather ghcide will via configuration in hie.yaml.

set -e
@@ -70,6 +70,7 @@ function flags {

# disable automatic selection of packages
echo "-hide-all-packages"
echo "-package proto-lens-jsonpb"

# run cabal and emit package flags from the environment file, removing comments & prefixing with -
cabal v2-exec -v0 bash -- -c 'cat "$GHC_ENVIRONMENT"' | grep -v '^--' | sed -e 's/^/-/'
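To make that last pipeline concrete, here is an illustrative (not captured from a real run) before/after of a GHC environment file being turned into ghci flags:

```
$ cat "$GHC_ENVIRONMENT"          # input (abridged, illustrative)
-- This is a GHC environment file written by cabal
clear-package-db
global-package-db
package-id base-4.13.0.0

$ cat "$GHC_ENVIRONMENT" | grep -v '^--' | sed -e 's/^/-/'
-clear-package-db
-global-package-db
-package-id base-4.13.0.0
```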
@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# Computes the paths to files causing changes to the ghci flags. You probably won’t be running this yourself, but rather ghcide will via configuration in hie.yaml.

set -e
@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# Usage: script/profile FILE_A FILE_B
# Builds and runs semantic on the given files with profiling enabled.
@@ -12,7 +12,8 @@ PARENT_DIR=$(dirname $(pwd))

export PROJECT="github.com/github/semantic"

# Generate Haskell for semantic's protobuf types
# Generate Haskell for semantic's protobuf types. See the entrypoint in
# Dockerfile for where the protoc plugins are configured.
docker run --rm --user $(id -u):$(id -g) -v $(pwd):/go/src/$PROJECT -w /go/src/$PROJECT \
semantic-protoc --proto_path=proto \
--haskell_out=./src \
@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
#/ Usage: script/publish
#/
#/ Build a docker image of the semantic CLI and publish to the GitHub Package Registry
@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# Usage: script/repl [ARGS...]
# Run a repl session capable of loading all of the packages and their individual components. Any passed arguments, e.g. module names or flags, will be passed to ghci.
@@ -65,7 +65,7 @@ library
, containers ^>= 0.6
, filepath
, fused-effects ^>= 1.0
, fused-effects-readline
, fused-effects-readline ^>= 0
, fused-syntax
, hashable
, haskeline ^>= 0.7.5
@@ -74,7 +74,7 @@ library
, pathtype ^>= 0.8.1
, prettyprinter >= 1.2 && < 2
, prettyprinter-ansi-terminal ^>= 1.1.1
, semantic-source ^>= 0.0.2
, semantic-source ^>= 0.1.0
, semilattices
, terminal-size ^>= 0.3
, text ^>= 1.2.3.1
@@ -4,6 +4,7 @@ module Analysis.Blob
, fromSource
, blobLanguage
, blobPath
, blobFilePath
, nullBlob
) where

@@ -38,8 +39,12 @@ fromSource filepath language source
blobLanguage :: Blob -> Language
blobLanguage = Analysis.File.fileBody . blobFile

blobPath :: Blob -> FilePath
blobPath = Path.toString . Analysis.File.filePath . blobFile
blobPath :: Blob -> Path.AbsRelFile
blobPath = Analysis.File.filePath . blobFile

-- | Show FilePath for error or json outputs.
blobFilePath :: Blob -> String
blobFilePath = Path.toString . blobPath

nullBlob :: Blob -> Bool
nullBlob = Source.null . blobSource
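The practical effect of this change for callers: `blobPath` now yields a typed path, and code that still needs a plain `String` (error messages, JSON output) goes through the new `blobFilePath`. A small sketch of the adjustment, with `someBlob` as an illustrative value:

```Haskell
-- before this change:  blobPath someBlob     :: FilePath
-- after this change:   blobPath someBlob     :: Path.AbsRelFile
--                      blobFilePath someBlob :: String   -- Path.toString (blobPath someBlob)
```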
@@ -7,7 +7,6 @@ module Analysis.File
) where

import Data.Maybe (fromJust, listToMaybe)
import Data.Semilattice.Lower
import GHC.Stack
import Source.Language as Language
import Source.Span
@@ -16,7 +15,7 @@ import qualified System.Path.PartClass as Path.PartClass

data File a = File
{ filePath :: !Path.AbsRelFile
, fileSpan :: {-# UNPACK #-} !Span
, fileSpan :: Span
, fileBody :: !a
}
deriving (Eq, Foldable, Functor, Ord, Show, Traversable)
@@ -30,4 +29,4 @@ fileLanguage :: File a -> Language
fileLanguage = Language.forPath . filePath

fromPath :: Path.PartClass.AbsRel ar => Path.File ar -> File Language
fromPath p = File (Path.toAbsRel p) lowerBound (Language.forPath p)
fromPath p = File (Path.toAbsRel p) (point (Pos 0 0)) (Language.forPath p)
@@ -12,19 +12,19 @@ import Analysis.File
import Data.Text (Text)
import qualified Data.Text as T
import Source.Language
import System.FilePath.Posix
import qualified System.Path as Path

-- | A 'Project' contains all the information that semantic needs
-- to execute an analysis, diffing, or graphing pass.
data Project = Project
{ projectRootDir :: FilePath
{ projectRootDir :: Path.AbsRelDir
, projectBlobs :: [Blob]
, projectLanguage :: Language
, projectExcludeDirs :: [FilePath]
, projectExcludeDirs :: [Path.AbsRelDir]
} deriving (Eq, Show)

projectName :: Project -> Text
projectName = T.pack . dropExtensions . takeFileName . projectRootDir
projectName = T.pack . maybe "" Path.toString . Path.takeDirName . projectRootDir

projectExtensions :: Project -> [String]
projectExtensions = extensionsForLanguage . projectLanguage
@@ -1,217 +1,5 @@ semantic-ast/README.md
# CodeGen Documentation
# semantic-ast

CodeGen is the process for auto-generating language-specific, strongly-typed ASTs to be used in [Semantic](https://github.com/github/semantic-code/blob/d9f91a05dc30a61b9ff8c536d75661d417f3c506/design-docs/precise-code-navigation.md).
This package is concerned with the CodeGen generation of strongly-typed ASTs.

_Note: This project was recently moved from `tree-sitter` into `Semantic`. These docs are in the process of being updated to reflect changes._

[Documentation](https://github.com/github/semantic/blob/master/docs/codegen.md)

(The roughly 200 removed lines were the earlier "CodeGen Documentation", written against the haskell-tree-sitter repository — `cabal new-repl lib:tree-sitter-python`, `TreeSitter.Python.AST.Module`, `cabal v2-test tree-sitter-python` — plus a pretty-printed copy of the example AST; an updated version of that material now lives in docs/codegen.md above and is not repeated here.)
@@ -55,13 +55,13 @@ library
build-depends: base ^>= 4.13
, aeson ^>= 1.4.2.0
, aeson-pretty ^>= 0.8.8
, bytestring ^>= 0.10.8.2
, bytestring ^>= 0.10.9.2
, containers >= 0.6.0.1
, directory ^>= 1.3.3.2
, filepath ^>= 1.4.1
, fused-effects ^>= 1.0
, tree-sitter ^>= 0.9.0.0
, semantic-source ^>= 0.0.2
, semantic-source ^>= 0.1.0
, template-haskell ^>= 2.15
, bytestring ^>= 0.10.8.2
, optparse-applicative >= 0.14.3 && < 0.16
@@ -2,6 +2,7 @@
module AST.TestHelpers
( CorpusExample(..)
, readCorpusFiles
, readCorpusFiles'
, parseCorpusFile
, testCorpus
) where
@@ -22,7 +23,7 @@ import System.FilePath.Glob
import Test.Tasty
import Test.Tasty.HUnit

testCorpus :: (ByteString -> IO (Either String (t a))) -> Path.RelFile -> IO TestTree
testCorpus :: (ByteString -> IO (Either String (t a))) -> Path.AbsRelFile -> IO TestTree
testCorpus parse path = do
xs <- parseCorpusFile path
case xs of
@@ -50,10 +51,15 @@ readCorpusFiles parent = do
files <- globDir1 (compile "**/*.txt") (Path.toString dir)
pure (Path.relPath <$> files)

readCorpusFiles' :: Path.AbsDir -> IO [Path.AbsRelFile]
readCorpusFiles' dir = do
files <- globDir1 (compile "**/*.txt") (Path.toString dir)
pure (Path.file <$> files)

data CorpusExample = CorpusExample { name :: String, code :: ByteString }
deriving (Eq, Show)

parseCorpusFile :: Path.RelFile -> IO (Either String [CorpusExample])
parseCorpusFile :: Path.AbsRelFile -> IO (Either String [CorpusExample])
parseCorpusFile path = do
c <- Data.ByteString.readFile (Path.toString path)
pure $ parseOnly corpusParser c
@@ -55,7 +55,7 @@ library
, prettyprinter >= 1.2.1 && < 2
, prettyprinter-ansi-terminal ^>= 1.1.1
, semantic-analysis ^>= 0
, semantic-source ^>= 0.0.2
, semantic-source ^>= 0.1.0
, text ^>= 1.2.3.1
, trifecta >= 2 && < 2.2
, unordered-containers ^>= 0.2.10
@@ -26,12 +26,12 @@ common haskell
, parsers ^>= 0.12.10
, semantic-ast
, semantic-core ^>= 0.0
, semantic-source ^>= 0.0.2
, semantic-source ^>= 0.1.0
, semantic-tags ^>= 0.0
, template-haskell ^>= 2.15
, text ^>= 1.2.3
, tree-sitter ^>= 0.9
, tree-sitter-go ^>= 0.5.0.0
, tree-sitter-go ^>= 0.5.0.1

ghc-options:
-Weverything
@@ -63,13 +63,12 @@ test-suite test
hs-source-dirs: test
main-is: Test.hs
build-depends: base
, tree-sitter
, tree-sitter-go
, semantic-ast
, bytestring ^>= 0.10.8.2
, hedgehog >= 0.6 && <2
, pathtype ^>= 0.8.1
, text
, semantic-ast
, semantic-go
, tasty
, tasty-hedgehog
, tasty-hunit
, text
@@ -12,11 +12,12 @@

module Language.Go.AST
( module Language.Go.AST
, Go.getTestCorpusDir
) where

import Prelude hiding (False, Float, Integer, Rational, String, True)
import AST.GenerateSyntax
import Language.Haskell.TH.Syntax (runIO)
import qualified TreeSitter.Go as Go (getNodeTypesPath, tree_sitter_go)
import qualified TreeSitter.Go as Go (getNodeTypesPath, getTestCorpusDir, tree_sitter_go)

runIO Go.getNodeTypesPath >>= astDeclarationsForLanguage Go.tree_sitter_go
@@ -7,10 +7,15 @@ import Language.Go.Grammar
import qualified Language.Go.AST as Go
import AST.TestHelpers
import AST.Unmarshal
import qualified Language.Go.AST as Go
import Language.Go.Grammar
import qualified System.Path as Path
import Test.Tasty

main :: IO ()
main
= readCorpusFiles (Path.relDir "tree-sitter-go/vendor/tree-sitter-go/corpus")
= Path.absDir <$> Go.getTestCorpusDir
>>= readCorpusFiles'
>>= traverse (testCorpus parse)
>>= defaultMain . tests
where parse = parseByteString @Go.SourceFile @() tree_sitter_go
1  semantic-go/vendor/tree-sitter-go  vendored
@@ -1 +0,0 @@
Subproject commit 689cc8fbdc0613d267434f221af85aff91a31f8c
@@ -26,12 +26,12 @@ common haskell
, parsers ^>= 0.12.10
, semantic-ast
, semantic-core ^>= 0.0
, semantic-source ^>= 0.0.2
, semantic-source ^>= 0.1.0
, semantic-tags ^>= 0.0
, template-haskell ^>= 2.15
, text ^>= 1.2.3
, tree-sitter ^>= 0.9
, tree-sitter-java ^>= 0.7.0.0
, tree-sitter-java ^>= 0.7.0.1

ghc-options:
-Weverything
@@ -63,11 +63,10 @@ test-suite test
hs-source-dirs: test
main-is: Test.hs
build-depends: base
, tree-sitter
, tree-sitter-java
, semantic-ast
, bytestring ^>= 0.10.8.2
, pathtype ^>= 0.8.1
, text
, semantic-ast
, semantic-java
, tasty
, tasty-hunit
, text
@@ -12,11 +12,12 @@

module Language.Java.AST
( module Language.Java.AST
, Java.getTestCorpusDir
) where

import AST.GenerateSyntax
import AST.Token
import Language.Haskell.TH.Syntax (runIO)
import qualified TreeSitter.Java as Java (getNodeTypesPath, tree_sitter_java)
import qualified TreeSitter.Java as Java (getNodeTypesPath, getTestCorpusDir, tree_sitter_java)

runIO Java.getNodeTypesPath >>= astDeclarationsForLanguage Java.tree_sitter_java
@@ -7,10 +7,15 @@ import TreeSitter.Java
import qualified TreeSitter.Java.AST as Java
import AST.TestHelpers
import AST.Unmarshal
import qualified Language.Java.AST as Java
import Language.Java.Grammar
import qualified System.Path as Path
import Test.Tasty

main :: IO ()
main
= readCorpusFiles (Path.relDir "tree-sitter-java/vendor/tree-sitter-java/corpus")
= Path.absDir <$> Java.getTestCorpusDir
>>= readCorpusFiles'
>>= traverse (testCorpus parse)
>>= defaultMain . tests
where
1  semantic-java/vendor/tree-sitter-java  vendored
@@ -1 +0,0 @@
Subproject commit afc4cec799f6594390aeb0ca5e16ec89e73d488e
@@ -26,12 +26,12 @@ common haskell
, parsers ^>= 0.12.10
, semantic-ast
, semantic-core ^>= 0.0
, semantic-source ^>= 0.0.2
, semantic-source ^>= 0.1.0
, semantic-tags ^>= 0.0
, template-haskell ^>= 2.15
, text ^>= 1.2.3
, tree-sitter ^>= 0.9
, tree-sitter-json ^>= 0.7.0.0
, tree-sitter-json ^>= 0.7.0.1

ghc-options:
-Weverything
@@ -63,13 +63,12 @@ test-suite test
hs-source-dirs: test
main-is: Test.hs
build-depends: base
, tree-sitter
, tree-sitter-json
, semantic-ast
, bytestring ^>= 0.10.8.2
, hedgehog >= 0.6 && <2
, pathtype ^>= 0.8.1
, text
, semantic-ast
, semantic-json
, tasty
, tasty-hedgehog
, tasty-hunit
, text
@@ -11,11 +11,12 @@
{-# LANGUAGE TypeApplications #-}
module Language.JSON.AST
( module Language.JSON.AST
, JSON.getTestCorpusDir
) where

import Prelude hiding (String)
import AST.GenerateSyntax
import Language.Haskell.TH.Syntax (runIO)
import qualified TreeSitter.JSON as JSON (getNodeTypesPath, tree_sitter_json)
import qualified TreeSitter.JSON as JSON (getNodeTypesPath, getTestCorpusDir, tree_sitter_json)

runIO JSON.getNodeTypesPath >>= astDeclarationsForLanguage JSON.tree_sitter_json
@@ -7,10 +7,15 @@ import TreeSitter.JSON
import qualified TreeSitter.JSON.AST as JSON
import AST.TestHelpers
import AST.Unmarshal
import qualified Language.JSON.AST as JSON
import Language.JSON.Grammar
import qualified System.Path as Path
import Test.Tasty

main :: IO ()
main
= readCorpusFiles (Path.relDir "tree-sitter-json/vendor/tree-sitter-json/corpus")
= Path.absDir <$> JSON.getTestCorpusDir
>>= readCorpusFiles'
>>= traverse (testCorpus parse)
>>= defaultMain . tests
where parse = parseByteString @JSON.Document @() tree_sitter_json
1  semantic-json/vendor/tree-sitter-json  vendored
@@ -1 +0,0 @@
Subproject commit 7b6a33f300e3e88c3017e0a9d88c77b50ea6d149
@@ -14,7 +14,7 @@ import Options.Applicative hiding (style)
import Text.Pretty.Simple (pPrint, pPrintNoColor)
import Data.Foldable (traverse_)
import Control.Monad ((>=>))
import Marshal.JSON (marshal)
import AST.Marshal.JSON (marshal)
import Data.ByteString.Lazy.Char8 (putStrLn)
import Data.Aeson.Encode.Pretty (encodePretty)
@@ -43,7 +43,7 @@ executable semantic-parse
build-depends: base
, semantic-ast
, tree-sitter ^>= 0.9.0.0
, semantic-source
, semantic-source ^>= 0.1.0
, tree-sitter-python ^>= 0.9.0.1
, bytestring
, optparse-applicative
@@ -26,7 +26,7 @@ common haskell
, parsers ^>= 0.12.10
, semantic-ast
, semantic-core ^>= 0.0
, semantic-source ^>= 0.0.2
, semantic-source ^>= 0.1.0
, semantic-tags ^>= 0.0
, template-haskell ^>= 2.15
, text ^>= 1.2.3
@@ -54,4 +54,5 @@ library
Language.PHP
Language.PHP.AST
Language.PHP.Grammar
Language.PHP.Tags
hs-source-dirs: src
@@ -1,14 +1,15 @@
-- | Semantic functionality for JSON programs.
-- | Semantic functionality for PHP programs.
module Language.PHP
( Term(..)
, TreeSitter.PHP.tree_sitter_php
) where

import qualified AST.Unmarshal as TS
import Data.Proxy
import qualified Language.PHP.AST as PHP
import qualified Language.PHP.Tags as PHPTags
import qualified Tags.Tagging.Precise as Tags
import qualified TreeSitter.PHP (tree_sitter_php)
import qualified AST.Unmarshal as TS

newtype Term a = Term { getTerm :: PHP.Program a }

@@ -19,6 +20,5 @@ instance TS.SymbolMatching Term where
instance TS.Unmarshal Term where
matchers = fmap (fmap (TS.hoist Term)) TS.matchers

-- | Tags aren’t really meaningful for JSON, but by implementing this we can avoid having to customize the set of parsers used for computing tags.
instance Tags.ToTags Term where
tags _ _ = []
tags src = Tags.runTagging src . PHPTags.tags . getTerm
213  semantic-php/src/Language/PHP/Tags.hs  Normal file
@@ -0,0 +1,213 @@
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE DisambiguateRecordFields #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeOperators #-}
module Language.PHP.Tags (tags) where

import AST.Element
import AST.Token
import AST.Traversable1
import Control.Effect.Reader
import Control.Effect.Writer
import Data.Text (Text)
import qualified Language.PHP.AST as PHP
import Source.Loc
import Source.Source as Source
import Tags.Tag
import qualified Tags.Tagging.Precise as Tags

class ToTags t where
  tags
    :: ( Has (Reader Source) sig m
       , Has (Writer Tags.Tags) sig m
       )
    => t Loc
    -> m ()
  default tags
    :: ( Has (Reader Source) sig m
       , Has (Writer Tags.Tags) sig m
       , Traversable1 ToTags t
       )
    => t Loc
    -> m ()
  tags = gtags

instance ToTags (Token sym n) where tags _ = pure ()

instance (ToTags l, ToTags r) => ToTags (l :+: r) where
  tags (L1 l) = tags l
  tags (R1 r) = tags r

gtags
  :: ( Has (Reader Source) sig m
     , Has (Writer Tags.Tags) sig m
     , Traversable1 ToTags t
     )
  => t Loc
  -> m ()
gtags = traverse1_ @ToTags (const (pure ())) tags

yieldTag :: (Has (Reader Source) sig m, Has (Writer Tags.Tags) sig m) => Text -> Kind -> Loc -> Range -> m ()
yieldTag name kind loc range = do
  src <- ask @Source
  Tags.yield (Tag name kind loc (Tags.firstLine src range) Nothing)


instance ToTags PHP.FunctionDefinition where
  tags t@PHP.FunctionDefinition
    { PHP.ann = loc@Loc { byteRange }
    , PHP.name = PHP.Name { text }
    } = yieldTag text Method loc byteRange >> gtags t

instance ToTags PHP.MethodDeclaration where
  tags t@PHP.MethodDeclaration
    { PHP.ann = loc@Loc { byteRange }
    , PHP.name = PHP.Name { text }
    } = yieldTag text Function loc byteRange >> gtags t

instance ToTags PHP.FunctionCallExpression where
  tags t@PHP.FunctionCallExpression
    { PHP.ann = loc@Loc { byteRange }
    , PHP.function = func
    } = match func
    where
      yield name = yieldTag name Call loc byteRange >> gtags t
      match expr = case expr of
        Prj (PHP.VariableName { extraChildren = PHP.Name { text } })
          -> yield text *> gtags t
        Prj (PHP.QualifiedName { extraChildren = [Prj (PHP.Name { text })] })
          -> yield text *> gtags t
        _
          -> gtags t

instance ToTags PHP.MemberCallExpression where
  tags t@PHP.MemberCallExpression
    { PHP.ann = loc@Loc { byteRange }
    , PHP.name = item
    } = case item of
      Prj (PHP.Name { text }) -> yieldTag text Call loc byteRange >> gtags t
      _ -> gtags t


instance ToTags PHP.AnonymousFunctionCreationExpression
instance ToTags PHP.AnonymousFunctionUseClause
instance ToTags PHP.Arguments
instance ToTags PHP.ArrayCreationExpression
instance ToTags PHP.ArrayElementInitializer
instance ToTags PHP.AssignmentExpression
instance ToTags PHP.AugmentedAssignmentExpression
instance ToTags PHP.BinaryExpression
instance ToTags PHP.Boolean
instance ToTags PHP.BreakStatement
instance ToTags PHP.CaseStatement
instance ToTags PHP.CastExpression
instance ToTags PHP.CastType
instance ToTags PHP.CatchClause
instance ToTags PHP.ClassBaseClause
instance ToTags PHP.ClassConstantAccessExpression
instance ToTags PHP.ClassDeclaration
instance ToTags PHP.ClassInterfaceClause
instance ToTags PHP.ClassModifier
instance ToTags PHP.CloneExpression
instance ToTags PHP.ColonBlock
instance ToTags PHP.CompoundStatement
instance ToTags PHP.ConditionalExpression
instance ToTags PHP.ConstDeclaration
instance ToTags PHP.ConstElement
instance ToTags PHP.ContinueStatement
instance ToTags PHP.DeclarationList
instance ToTags PHP.DeclareDirective
instance ToTags PHP.DeclareStatement
instance ToTags PHP.DefaultStatement
instance ToTags PHP.DoStatement
instance ToTags PHP.DynamicVariableName
instance ToTags PHP.EchoStatement
instance ToTags PHP.ElseClause
instance ToTags PHP.ElseIfClause
instance ToTags PHP.EmptyStatement
instance ToTags PHP.ExponentiationExpression
instance ToTags PHP.Expression
instance ToTags PHP.ExpressionStatement
instance ToTags PHP.FinallyClause
instance ToTags PHP.Float
instance ToTags PHP.ForStatement
instance ToTags PHP.ForeachStatement
instance ToTags PHP.FormalParameters
instance ToTags PHP.FunctionStaticDeclaration
instance ToTags PHP.GlobalDeclaration
instance ToTags PHP.GotoStatement
instance ToTags PHP.Heredoc
instance ToTags PHP.IfStatement
instance ToTags PHP.IncludeExpression
instance ToTags PHP.IncludeOnceExpression
instance ToTags PHP.InterfaceBaseClause
instance ToTags PHP.InterfaceDeclaration
instance ToTags PHP.ListLiteral
instance ToTags PHP.Literal
instance ToTags PHP.MemberAccessExpression
instance ToTags PHP.Name
instance ToTags PHP.NamedLabelStatement
instance ToTags PHP.NamespaceAliasingClause
instance ToTags PHP.NamespaceDefinition
instance ToTags PHP.NamespaceFunctionOrConst
instance ToTags PHP.NamespaceName
instance ToTags PHP.NamespaceNameAsPrefix
instance ToTags PHP.NamespaceUseClause
instance ToTags PHP.NamespaceUseDeclaration
instance ToTags PHP.NamespaceUseGroup
instance ToTags PHP.NamespaceUseGroupClause
instance ToTags PHP.NewVariable
instance ToTags PHP.Null
instance ToTags PHP.ObjectCreationExpression
instance ToTags PHP.OptionalType
instance ToTags PHP.Pair
instance ToTags PHP.ParenthesizedExpression
instance ToTags PHP.PhpTag
instance ToTags PHP.PrimaryExpression
instance ToTags PHP.PrimitiveType
instance ToTags PHP.PrintIntrinsic
instance ToTags PHP.Program
instance ToTags PHP.PropertyDeclaration
instance ToTags PHP.PropertyElement
instance ToTags PHP.PropertyInitializer
instance ToTags PHP.QualifiedName
instance ToTags PHP.RelativeScope
instance ToTags PHP.RequireExpression
instance ToTags PHP.RequireOnceExpression
instance ToTags PHP.ReturnStatement
instance ToTags PHP.ScopedCallExpression
instance ToTags PHP.ScopedPropertyAccessExpression
instance ToTags PHP.SequenceExpression
instance ToTags PHP.ShellCommandExpression
instance ToTags PHP.SimpleParameter
instance ToTags PHP.Statement
instance ToTags PHP.StaticModifier
instance ToTags PHP.StaticVariableDeclaration
instance ToTags PHP.String
instance ToTags PHP.SubscriptExpression
instance ToTags PHP.SwitchBlock
instance ToTags PHP.SwitchStatement
instance ToTags PHP.Text
instance ToTags PHP.ThrowStatement
instance ToTags PHP.TraitDeclaration
instance ToTags PHP.TryStatement
instance ToTags PHP.Type
instance ToTags PHP.TypeName
instance ToTags PHP.UnaryOpExpression
instance ToTags PHP.UnsetStatement
instance ToTags PHP.UpdateExpression
instance ToTags PHP.UseAsClause
instance ToTags PHP.UseDeclaration
instance ToTags PHP.UseInsteadOfClause
instance ToTags PHP.UseList
instance ToTags PHP.VarModifier
instance ToTags PHP.VariableName
instance ToTags PHP.VariadicParameter
instance ToTags PHP.VariadicUnpacking
instance ToTags PHP.VisibilityModifier
instance ToTags PHP.WhileStatement
instance ToTags PHP.YieldExpression
instance ToTags PHP.Integer
1
semantic-php/vendor/tree-sitter-php
vendored
1
semantic-php/vendor/tree-sitter-php
vendored
@ -1 +0,0 @@
Subproject commit 6bb7e04216eae8314e0b893029590247418b5998
@ -8,8 +8,8 @@ import qualified Data.ByteString as B
import Gauge
import System.Exit (die)
import System.Environment (getArgs)
import Language.Python.Grammar
import qualified Language.Python.AST as Py
import Language.Python.Grammar
import AST.Unmarshal

main :: IO ()
@ -27,14 +27,14 @@ common haskell
, semantic-analysis ^>= 0
, semantic-ast
, semantic-core ^>= 0.0
, semantic-source ^>= 0.0.2
, semantic-source ^>= 0.1.0
, semantic-tags ^>= 0.0
, semantic-scope-graph ^>= 0.0
, semilattices ^>= 0
, template-haskell ^>= 2.15
, text ^>= 1.2.3
, tree-sitter ^>= 0.9
, tree-sitter-python ^>= 0.9.0.1
, tree-sitter-python ^>= 0.9.0.2
, containers
ghc-options:
-Weverything
@ -116,18 +116,17 @@ test-suite test
import: haskell
type: exitcode-stdio-1.0
hs-source-dirs: test
main-is: Test.hs
main-is: PreciseTest.hs
build-depends: base
, tree-sitter
, tree-sitter-python
, semantic-ast
, bytestring ^>= 0.10.8.2
, hedgehog >= 0.6 && <2
, pathtype ^>= 0.8.1
, text
, semantic-ast
, semantic-python
, tasty
, tasty-hedgehog
, tasty-hunit
, text

executable benchmark
import: haskell
@ -138,5 +137,4 @@ executable benchmark
base
, gauge ^>= 0.2.5
, bytestring
, tree-sitter
, tree-sitter-python
, semantic-python
@ -12,11 +12,12 @@

module Language.Python.AST
( module Language.Python.AST
, Python.getTestCorpusDir
) where

import Prelude hiding (False, Float, Integer, String, True)
import AST.GenerateSyntax
import Language.Haskell.TH.Syntax (runIO)
import qualified TreeSitter.Python as Python (getNodeTypesPath, tree_sitter_python)
import qualified TreeSitter.Python as Python (getNodeTypesPath, getTestCorpusDir, tree_sitter_python)

runIO Python.getNodeTypesPath >>= astDeclarationsForLanguage Python.tree_sitter_python
@ -33,7 +33,6 @@ import Data.List.NonEmpty (NonEmpty (..))
import Data.Maybe
import Data.Monoid
import qualified Data.ScopeGraph as ScopeGraph
import Data.Semilattice.Lower
import Data.Traversable
import GHC.Records
import GHC.TypeLits
@ -42,7 +41,7 @@ import Language.Python.Patterns
import Scope.Graph.Convert (Result (..), complete, todo)
import Scope.Types
import Source.Loc (Loc)
import Source.Span (Span, span_)
import Source.Span (Span, Pos (..), span_, point)

-- This typeclass is internal-only, though it shares the same interface
-- as the one defined in semantic-scope-graph. The somewhat-unconventional
@ -197,7 +196,7 @@ instance ToScopeGraph Py.FunctionDefinition where
{ Props.kind = ScopeGraph.Parameter
, Props.relation = ScopeGraph.Default
, Props.associatedScope = Nothing
, Props.span = lowerBound
, Props.span = point (Pos 0 0)
}
let param (Py.Parameter (Prj (Py.Identifier pann pname))) = Just (pann, Name.name pname)
param _ = Nothing
@ -14,6 +14,7 @@ import AST.Token
import AST.Traversable1
import Control.Effect.Reader
import Control.Effect.Writer
import Data.Foldable
import Data.List.NonEmpty (NonEmpty (..))
import Data.Maybe (listToMaybe)
import Data.Text as Text
@ -54,8 +55,15 @@ keywordFunctionCall
=> t Loc -> Loc -> Range -> Text -> m ()
keywordFunctionCall t loc range name = yieldTag name Function loc range Nothing >> gtags t

instance ToTags Py.String where
tags Py.String { extraChildren } = for_ extraChildren $ \ x -> case x of
Prj t@Py.Interpolation { } -> tags t
_ -> pure ()

instance ToTags Py.Interpolation where
tags Py.Interpolation { } = pure ()
tags Py.Interpolation { extraChildren } = for_ extraChildren $ \ x -> case x of
Prj (Py.Expression expr) -> tags expr
_ -> pure ()

instance ToTags Py.AssertStatement where
tags t@Py.AssertStatement { ann = loc@Loc { byteRange } } = keywordFunctionCall t loc byteRange "assert"
@ -211,7 +219,6 @@ instance ToTags Py.Set
instance ToTags Py.SetComprehension
instance ToTags Py.SimpleStatement
instance ToTags Py.Slice
instance ToTags Py.String
instance ToTags Py.Subscript
instance ToTags Py.True
instance ToTags Py.TryStatement
@ -58,7 +58,7 @@ The graph should be
runScopeGraph :: ToScopeGraph t => Path.AbsRelFile -> Source.Source -> t Loc -> (ScopeGraph.ScopeGraph Name, Result)
runScopeGraph p _src item = run . runSketch info $ scopeGraph item
where
info = ModuleInfo (Path.toString p) "Python" mempty
info = ModuleInfo p "Python" mempty

sampleGraphThing :: ScopeGraphEff sig m => m Result
sampleGraphThing = do
@ -66,24 +66,24 @@ sampleGraphThing = do
declare "goodbye" (Props.Declaration ScopeGraph.Assignment ScopeGraph.Default Nothing (Span (Pos 3 0) (Pos 3 12)))
pure Complete

graphFile :: FilePath -> IO (ScopeGraph.ScopeGraph Name, Result)
graphFile :: Path.AbsRelFile -> IO (ScopeGraph.ScopeGraph Name, Result)
graphFile fp = do
file <- ByteString.readFile fp
file <- ByteString.readFile $ Path.toString fp
tree <- TS.parseByteString @Py.Term @Loc TSP.tree_sitter_python file
pyModule <- either die pure tree
pure $ runScopeGraph (Path.absRel fp) (Source.fromUTF8 file) pyModule
pure $ runScopeGraph fp (Source.fromUTF8 file) pyModule


assertSimpleAssignment :: HUnit.Assertion
assertSimpleAssignment = do
let path = "semantic-python/test/fixtures/1-04-toplevel-assignment.py"
let path = Path.absRel "semantic-python/test/fixtures/1-04-toplevel-assignment.py"
(result, Complete) <- graphFile path
(expecto, Complete) <- runM $ runSketch (ModuleInfo path "Python" mempty) sampleGraphThing
HUnit.assertEqual "Should work for simple case" expecto result

assertSimpleReference :: HUnit.Assertion
assertSimpleReference = do
let path = "semantic-python/test/fixtures/5-01-simple-reference.py"
let path = Path.absRel "semantic-python/test/fixtures/5-01-simple-reference.py"
(result, Complete) <- graphFile path
(expecto, Complete) <- runM $ runSketch (ModuleInfo path "Python" mempty) expectedReference

@ -115,7 +115,7 @@ expectedImportHole = do

assertLexicalScope :: HUnit.Assertion
assertLexicalScope = do
let path = "semantic-python/test/fixtures/5-02-simple-function.py"
let path = Path.absRel "semantic-python/test/fixtures/5-02-simple-function.py"
let info = ModuleInfo path "Python" mempty
(graph, _) <- graphFile path
case run (runSketch info expectedLexicalScope) of
@ -132,7 +132,7 @@ expectedLexicalScope = do

assertFunctionArg :: HUnit.Assertion
assertFunctionArg = do
let path = "semantic-python/test/fixtures/5-03-function-argument.py"
let path = Path.absRel "semantic-python/test/fixtures/5-03-function-argument.py"
(graph, _) <- graphFile path
let info = ModuleInfo path "Python" mempty
case run (runSketch info expectedFunctionArg) of
@ -154,7 +154,7 @@ expectedFunctionArg = do

assertImportHole :: HUnit.Assertion
assertImportHole = do
let path = "semantic-python/test/fixtures/cheese/6-01-imports.py"
let path = Path.absRel "semantic-python/test/fixtures/cheese/6-01-imports.py"
(graph, _) <- graphFile path
let info = ModuleInfo path "Python" mempty
case run (runSketch info expectedImportHole) of
@ -163,7 +163,7 @@ assertImportHole = do

assertQualifiedImport :: HUnit.Assertion
assertQualifiedImport = do
let path = "semantic-python/test/fixtures/cheese/6-01-qualified-imports.py"
let path = Path.absRel "semantic-python/test/fixtures/cheese/6-01-qualified-imports.py"
(graph, _) <- graphFile path
let info = ModuleInfo path "Python" mempty
case run (runSketch info expectedQualifiedImport) of
@ -10,7 +10,8 @@ import AST.Unmarshal

main :: IO ()
main
= readCorpusFiles (Path.relDir "tree-sitter-python/vendor/tree-sitter-python/test/corpus")
= Path.absDir <$> Py.getTestCorpusDir
>>= readCorpusFiles'
>>= traverse (testCorpus parse)
>>= defaultMain . tests
where parse = parseByteString @Py.Module @() tree_sitter_python
1
semantic-python/vendor/tree-sitter-python
vendored
1
semantic-python/vendor/tree-sitter-python
vendored
@ -1 +0,0 @@
Subproject commit 899ac8d5d6c883b2f05362c9953e14e78aac474c
@ -26,12 +26,12 @@ common haskell
, parsers ^>= 0.12.10
, semantic-ast
, semantic-core ^>= 0.0
, semantic-source ^>= 0.0.2
, semantic-source ^>= 0.1.0
, semantic-tags ^>= 0.0
, template-haskell ^>= 2.15
, text ^>= 1.2.3
, tree-sitter ^>= 0.9
, tree-sitter-ruby ^>= 0.5.0.1
, tree-sitter-ruby ^>= 0.5.0.2

ghc-options:
-Weverything
@ -63,16 +63,15 @@ test-suite test
hs-source-dirs: test
main-is: Test.hs
build-depends: base
, tree-sitter
, tree-sitter-ruby
, semantic-ast
, bytestring ^>= 0.10.8.2
, hedgehog >= 0.6 && <2
, pathtype ^>= 0.8.1
, text
, semantic-ast
, semantic-ruby
, tasty
, tasty-hedgehog
, tasty-hunit
, text

executable benchmarks
import: haskell
@ -87,8 +86,7 @@ executable benchmarks
, Glob
, lens >= 4.17 && < 4.19
, pathtype ^>= 0.8.1
, tree-sitter
, tree-sitter-ruby
, semantic-ruby

executable tree-sitter-ruby
import: haskell
@ -97,5 +95,4 @@ executable tree-sitter-ruby
main-is: Main.hs
build-depends: base
, bytestring
, tree-sitter
, tree-sitter-ruby
, semantic-ruby
@ -12,11 +12,12 @@

module Language.Ruby.AST
( module Language.Ruby.AST
, Ruby.getTestCorpusDir
) where

import Prelude hiding (False, Float, Integer, Rational, String, True)
import AST.GenerateSyntax
import Language.Haskell.TH.Syntax (runIO)
import qualified TreeSitter.Ruby as Ruby (getNodeTypesPath, tree_sitter_ruby)
import qualified TreeSitter.Ruby as Ruby (getNodeTypesPath, getTestCorpusDir, tree_sitter_ruby)

runIO Ruby.getNodeTypesPath >>= astDeclarationsForLanguage Ruby.tree_sitter_ruby
@ -7,10 +7,15 @@ import TreeSitter.Ruby
import qualified TreeSitter.Ruby.AST as Rb
import AST.TestHelpers
import AST.Unmarshal
import qualified Language.Ruby.AST as Rb
import Language.Ruby.Grammar
import qualified System.Path as Path
import Test.Tasty

main :: IO ()
main
= readCorpusFiles (Path.relDir "tree-sitter-ruby/vendor/tree-sitter-ruby/test/corpus")
= Path.absDir <$> Rb.getTestCorpusDir
>>= readCorpusFiles'
>>= traverse (testCorpus parse)
>>= defaultMain . tests
where parse = parseByteString @Rb.Program @() tree_sitter_ruby
1
semantic-ruby/vendor/tree-sitter-ruby
vendored
1
semantic-ruby/vendor/tree-sitter-ruby
vendored
@ -1 +0,0 @@
Subproject commit eb2b6225bfb80010f2e4cbd27db8c6f3775230b5
@ -47,7 +47,7 @@ library
, lens
, pathtype
, semantic-analysis
, semantic-source ^>= 0.0.2
, semantic-source ^>= 0.1.0
, semilattices
, text ^>= 1.2.3.1
hs-source-dirs: src
@ -14,6 +14,7 @@ import Data.Maybe
import Data.Semilattice.Lower
import Data.Text (Text)
import GHC.Stack
import qualified System.Path as Path

data Module body = Module { moduleInfo :: ModuleInfo, moduleBody :: body }
deriving (Eq, Foldable, Functor, Ord, Traversable)
@ -22,20 +23,20 @@ instance Show body => Show (Module body) where
showsPrec d Module{..} = showsBinaryWith showsPrec showsPrec "Module" d (modulePath moduleInfo) moduleBody


type ModulePath = FilePath
type ModulePath = Path.AbsRelFile

data ModuleInfo = ModuleInfo { modulePath :: ModulePath, moduleLanguage :: Text, moduleOid :: Text }
deriving (Eq, Ord)

instance Lower ModuleInfo where
lowerBound = ModuleInfo mempty "Unknown" mempty
lowerBound = ModuleInfo (Path.toAbsRel Path.emptyFile) "Unknown" mempty

instance Show ModuleInfo where
showsPrec d = showsUnaryWith showsPrec "ModuleInfo" d . modulePath

moduleInfoFromSrcLoc :: SrcLoc -> ModuleInfo
moduleInfoFromSrcLoc loc = ModuleInfo (srcLocModule loc) "Unknown" mempty
moduleInfoFromSrcLoc loc = ModuleInfo (Path.absRel $ srcLocModule loc) "Unknown" mempty

-- | Produce 'ModuleInfo' from the top location on the Haskell call stack (i.e. the file where the call to 'moduleInfoFromCallStack' was made).
moduleInfoFromCallStack :: HasCallStack => ModuleInfo
moduleInfoFromCallStack = maybe (ModuleInfo "?" "Unknown" mempty) (moduleInfoFromSrcLoc . snd) (listToMaybe (getCallStack callStack))
moduleInfoFromCallStack = maybe (ModuleInfo (Path.absRel "?") "Unknown" mempty) (moduleInfoFromSrcLoc . snd) (listToMaybe (getCallStack callStack))
@ -37,7 +37,7 @@ instance HasSpan (Info scopeAddress) where
{-# INLINE span_ #-}

instance Lower (Info scopeAddress) where
lowerBound = Info lowerBound lowerBound lowerBound Public lowerBound lowerBound Nothing
lowerBound = Info lowerBound lowerBound lowerBound Public (point (Pos 0 0)) lowerBound Nothing

instance AbstractHole (Info address) where
hole = lowerBound
@ -1,3 +1,9 @@
# 0.1.0.0

- Adds `CodeQL` language constructor.
- Bumps `lingo-haskell` to 0.3.2.
- Removes Span and Pos lower bound instances. This makes callers responsible for defining whether Span / Pos are 0 or 1 indexed.

# 0.0.2.0

- Adds `Source.Language`.
@ -55,7 +55,7 @@ library
, containers ^>= 0.6.2
, generic-monoid ^>= 0.1.0.0
, hashable >= 1.2.7 && < 1.4
, lingo ^>= 0.3
, lingo ^>= 0.3.2.0
, pathtype ^>= 0.8.1
, semilattices ^>= 0.0.0.3
, text ^>= 1.2.3.1
@ -33,11 +33,12 @@ data Language
| JSON
| JSX
| Markdown
| PHP
| Python
| Ruby
| TypeScript
| PHP
| TSX
| CodeQL
deriving (Eq, Generic, Ord, Read, Show, Bounded, Hashable, ToJSON, Enum)

-- | Reifies a proxied type-level 'Language' to a value.
@ -47,6 +48,9 @@ class SLanguage (lang :: Language) where
instance SLanguage 'Unknown where
reflect _ = Unknown

instance SLanguage 'CodeQL where
reflect _ = CodeQL

instance SLanguage 'Go where
reflect _ = Go

@ -68,6 +72,9 @@ instance SLanguage 'JSX where
instance SLanguage 'Markdown where
reflect _ = Markdown

instance SLanguage 'PHP where
reflect _ = PHP

instance SLanguage 'Python where
reflect _ = Python

@ -77,9 +84,6 @@ instance SLanguage 'Ruby where
instance SLanguage 'TypeScript where
reflect _ = TypeScript

instance SLanguage 'PHP where
reflect _ = PHP

instance FromJSON Language where
parseJSON = withText "Language" $ \l ->
pure $ textToLanguage l
@ -106,6 +110,7 @@ forPath path =
languageToText :: Language -> T.Text
languageToText = \case
Unknown -> "Unknown"
CodeQL -> "CodeQL"
Go -> "Go"
Haskell -> "Haskell"
Java -> "Java"
@ -113,14 +118,15 @@ languageToText = \case
JSON -> "JSON"
JSX -> "JSX"
Markdown -> "Markdown"
PHP -> "PHP"
Python -> "Python"
Ruby -> "Ruby"
TypeScript -> "TypeScript"
TSX -> "TSX"
PHP -> "PHP"

textToLanguage :: T.Text -> Language
textToLanguage = \case
"CodeQL" -> CodeQL
"Go" -> Go
"Haskell" -> Haskell
"Java" -> Java
@ -128,9 +134,9 @@ textToLanguage = \case
"JSON" -> JSON
"JSX" -> JSX
"Markdown" -> Markdown
"PHP" -> PHP
"Python" -> Python
"Ruby" -> Ruby
"TypeScript" -> TypeScript
"TSX" -> TSX
"PHP" -> PHP
_ -> Unknown
@ -25,7 +25,7 @@ library
build-depends:
base >= 4.13 && < 5
, fused-effects ^>= 1.0
, semantic-source ^>= 0.0.2
, semantic-source ^>= 0.1.0
, text ^>= 1.2.3.1
hs-source-dirs: src
default-language: Haskell2010
@ -26,12 +26,12 @@ common haskell
, parsers ^>= 0.12.10
, semantic-ast
, semantic-core ^>= 0.0
, semantic-source ^>= 0.0.2
, semantic-source ^>= 0.1.0
, semantic-tags ^>= 0.0
, template-haskell ^>= 2.15
, text ^>= 1.2.3
, tree-sitter ^>= 0.9
, tree-sitter-tsx ^>= 0.5.0.0
, tree-sitter-tsx ^>= 0.5.0.1

ghc-options:
-Weverything
@ -63,13 +63,12 @@ test-suite test
hs-source-dirs: test
main-is: Test.hs
build-depends: base
, tree-sitter
, tree-sitter-tsx
, semantic-ast
, bytestring ^>= 0.10.8.2
, hedgehog >= 0.6 && <2
, pathtype ^>= 0.8.1
, text
, semantic-ast
, semantic-tsx
, tasty
, tasty-hedgehog
, tasty-hunit
, text
@ -12,11 +12,12 @@

module Language.TSX.AST
( module Language.TSX.AST
, TSX.getTestCorpusDir
) where

import Prelude hiding (False, Float, Integer, String, True)
import AST.GenerateSyntax
import Language.Haskell.TH.Syntax (runIO)
import qualified TreeSitter.TSX as TSX (getNodeTypesPath, tree_sitter_tsx)
import qualified TreeSitter.TSX as TSX (getNodeTypesPath, getTestCorpusDir, tree_sitter_tsx)

runIO TSX.getNodeTypesPath >>= astDeclarationsForLanguage TSX.tree_sitter_tsx
@ -68,6 +68,18 @@ instance ToTags Tsx.MethodDefinition where
where
yield name = yieldTag name Method loc byteRange >> gtags t

instance ToTags Tsx.Pair where
tags t@Tsx.Pair
{ ann = loc@Loc { byteRange }
, key
, value = Tsx.Expression expr
} = case (key, expr) of
(Prj Tsx.PropertyIdentifier { text }, Prj Tsx.Function{}) -> yield text
(Prj Tsx.PropertyIdentifier { text }, Prj Tsx.ArrowFunction{}) -> yield text
_ -> gtags t
where
yield text = yieldTag text Function loc byteRange >> gtags t

instance ToTags Tsx.ClassDeclaration where
tags t@Tsx.ClassDeclaration
{ ann = loc@Loc { byteRange }
@ -112,6 +124,34 @@ instance ToTags Tsx.Module where
_ -> gtags t
yield text = yieldTag text Module loc byteRange >> gtags t

instance ToTags Tsx.VariableDeclarator where
tags t@Tsx.VariableDeclarator
{ ann = loc@Loc { byteRange }
, name
, value = Just (Tsx.Expression expr)
} = case (expr, name) of
(Prj Tsx.Function{}, Prj Tsx.Identifier { text }) -> yield text
(Prj Tsx.ArrowFunction{}, Prj Tsx.Identifier { text }) -> yield text
_ -> gtags t
where
yield text = yieldTag text Function loc byteRange >> gtags t
tags t = gtags t

instance ToTags Tsx.AssignmentExpression where
tags t@Tsx.AssignmentExpression
{ ann = loc@Loc { byteRange }
, left
, right = (Tsx.Expression expr)
} = case (left, expr) of
(Prj Tsx.Identifier { text }, Prj Tsx.Function{}) -> yield text
(Prj Tsx.Identifier { text }, Prj Tsx.ArrowFunction{}) -> yield text
(Prj Tsx.MemberExpression { property = Tsx.PropertyIdentifier { text } }, Prj Tsx.Function{}) -> yield text
(Prj Tsx.MemberExpression { property = Tsx.PropertyIdentifier { text } }, Prj Tsx.ArrowFunction{}) -> yield text
_ -> gtags t
where
yield text = yieldTag text Function loc byteRange >> gtags t


instance (ToTags l, ToTags r) => ToTags (l :+: r) where
tags (L1 l) = tags l
tags (R1 r) = tags r
@ -151,7 +191,7 @@ instance ToTags Tsx.ArrayPattern
instance ToTags Tsx.ArrayType
instance ToTags Tsx.ArrowFunction
instance ToTags Tsx.AsExpression
instance ToTags Tsx.AssignmentExpression
-- instance ToTags Tsx.AssignmentExpression
instance ToTags Tsx.AssignmentPattern
instance ToTags Tsx.AugmentedAssignmentExpression
instance ToTags Tsx.AwaitExpression
@ -246,7 +286,7 @@ instance ToTags Tsx.Object
instance ToTags Tsx.ObjectPattern
instance ToTags Tsx.ObjectType
instance ToTags Tsx.OptionalParameter
instance ToTags Tsx.Pair
-- instance ToTags Tsx.Pair
instance ToTags Tsx.ParenthesizedExpression
instance ToTags Tsx.ParenthesizedType
instance ToTags Tsx.PredefinedType
@ -295,7 +335,7 @@ instance ToTags Tsx.Undefined
instance ToTags Tsx.UnionType
instance ToTags Tsx.UpdateExpression
instance ToTags Tsx.VariableDeclaration
instance ToTags Tsx.VariableDeclarator
-- instance ToTags Tsx.VariableDeclarator
instance ToTags Tsx.WhileStatement
instance ToTags Tsx.WithStatement
instance ToTags Tsx.YieldExpression
@ -7,13 +7,18 @@ import TreeSitter.TSX
import qualified TreeSitter.TSX.AST as Ts
import AST.TestHelpers
import AST.Unmarshal
import qualified Language.TSX.AST as Tsx
import Language.TSX.Grammar
import qualified System.Path as Path
import Test.Tasty

main :: IO ()
main
= readCorpusFiles (Path.relDir "tree-sitter-tsx/vendor/tree-sitter-typescript/tsx/corpus")
= Path.absDir <$> Tsx.getTestCorpusDir
>>= readCorpusFiles'
>>= traverse (testCorpus parse)
>>= defaultMain . tests
where parse = parseByteString @Ts.Program @() tree_sitter_tsx
where parse = parseByteString @Tsx.Program @() tree_sitter_tsx

tests :: [TestTree] -> TestTree
tests = testGroup "tree-sitter-tsx corpus tests"
1
semantic-tsx/vendor/tree-sitter-typescript
vendored
1
semantic-tsx/vendor/tree-sitter-typescript
vendored
@ -1 +0,0 @@
Subproject commit aa950f58ea8aa112bc72f3481b98fc2d3c07b3e0
@ -26,12 +26,12 @@ common haskell
, parsers ^>= 0.12.10
, semantic-ast
, semantic-core ^>= 0.0
, semantic-source ^>= 0.0.2
, semantic-source ^>= 0.1.0
, semantic-tags ^>= 0.0
, template-haskell ^>= 2.15
, text ^>= 1.2.3
, tree-sitter ^>= 0.9.0.0
, tree-sitter-typescript ^>= 0.5.0.0
, tree-sitter-typescript ^>= 0.5.0.1

ghc-options:
-Weverything
@ -63,13 +63,12 @@ test-suite test
hs-source-dirs: test
main-is: Test.hs
build-depends: base
, tree-sitter
, tree-sitter-typescript
, semantic-ast
, bytestring ^>= 0.10.8.2
, hedgehog >= 0.6 && <2
, pathtype ^>= 0.8.1
, text
, semantic-ast
, semantic-typescript
, tasty
, tasty-hedgehog
, tasty-hunit
, text
@ -12,11 +12,12 @@

module Language.TypeScript.AST
( module Language.TypeScript.AST
, TypeScript.getTestCorpusDir
) where

import Prelude hiding (False, Float, Integer, String, True)
import AST.GenerateSyntax
import Language.Haskell.TH.Syntax (runIO)
import qualified TreeSitter.TypeScript as TypeScript (getNodeTypesPath, tree_sitter_typescript)
import qualified TreeSitter.TypeScript as TypeScript (getNodeTypesPath, getTestCorpusDir, tree_sitter_typescript)

runIO TypeScript.getNodeTypesPath >>= astDeclarationsForLanguage TypeScript.tree_sitter_typescript
@ -68,6 +68,18 @@ instance ToTags Ts.MethodDefinition where
where
yield name = yieldTag name Method loc byteRange >> gtags t

instance ToTags Ts.Pair where
tags t@Ts.Pair
{ ann = loc@Loc { byteRange }
, key
, value = Ts.Expression expr
} = case (key, expr) of
(Prj Ts.PropertyIdentifier { text }, Prj Ts.Function{}) -> yield text
(Prj Ts.PropertyIdentifier { text }, Prj Ts.ArrowFunction{}) -> yield text
_ -> gtags t
where
yield text = yieldTag text Function loc byteRange >> gtags t

instance ToTags Ts.ClassDeclaration where
tags t@Ts.ClassDeclaration
{ ann = loc@Loc { byteRange }
@ -101,10 +113,38 @@ instance ToTags Ts.Module where
match expr = case expr of
Prj Ts.Identifier { text } -> yield text
-- TODO: Handle NestedIdentifiers and Strings
-- Prj Tsx.NestedIdentifier { extraChildren } -> match
-- Prj Ts.NestedIdentifier { extraChildren } -> match
_ -> gtags t
yield text = yieldTag text Module loc byteRange >> gtags t

instance ToTags Ts.VariableDeclarator where
tags t@Ts.VariableDeclarator
{ ann = loc@Loc { byteRange }
, name
, value = Just (Ts.Expression expr)
} = case (expr, name) of
(Prj Ts.Function{}, Prj Ts.Identifier { text }) -> yield text
(Prj Ts.ArrowFunction{}, Prj Ts.Identifier { text }) -> yield text
_ -> gtags t
where
yield text = yieldTag text Function loc byteRange >> gtags t
tags t = gtags t

instance ToTags Ts.AssignmentExpression where
tags t@Ts.AssignmentExpression
{ ann = loc@Loc { byteRange }
, left
, right = (Ts.Expression expr)
} = case (left, expr) of
(Prj Ts.Identifier { text }, Prj Ts.Function{}) -> yield text
(Prj Ts.Identifier { text }, Prj Ts.ArrowFunction{}) -> yield text
(Prj Ts.MemberExpression { property = Ts.PropertyIdentifier { text } }, Prj Ts.Function{}) -> yield text
(Prj Ts.MemberExpression { property = Ts.PropertyIdentifier { text } }, Prj Ts.ArrowFunction{}) -> yield text
_ -> gtags t
where
yield text = yieldTag text Function loc byteRange >> gtags t


instance (ToTags l, ToTags r) => ToTags (l :+: r) where
tags (L1 l) = tags l
tags (R1 r) = tags r
@ -144,7 +184,7 @@ instance ToTags Ts.ArrayPattern
instance ToTags Ts.ArrayType
instance ToTags Ts.ArrowFunction
instance ToTags Ts.AsExpression
instance ToTags Ts.AssignmentExpression
-- instance ToTags Ts.AssignmentExpression
instance ToTags Ts.AssignmentPattern
instance ToTags Ts.AugmentedAssignmentExpression
instance ToTags Ts.AwaitExpression
@ -239,7 +279,7 @@ instance ToTags Ts.Object
instance ToTags Ts.ObjectPattern
instance ToTags Ts.ObjectType
instance ToTags Ts.OptionalParameter
instance ToTags Ts.Pair
-- instance ToTags Ts.Pair
instance ToTags Ts.ParenthesizedExpression
instance ToTags Ts.ParenthesizedType
instance ToTags Ts.PredefinedType
@ -289,7 +329,7 @@ instance ToTags Ts.Undefined
instance ToTags Ts.UnionType
instance ToTags Ts.UpdateExpression
instance ToTags Ts.VariableDeclaration
instance ToTags Ts.VariableDeclarator
-- instance ToTags Ts.VariableDeclarator
instance ToTags Ts.WhileStatement
instance ToTags Ts.WithStatement
instance ToTags Ts.YieldExpression
@ -7,10 +7,15 @@ import TreeSitter.TypeScript
import qualified TreeSitter.TypeScript.AST as Ts
import AST.TestHelpers
import AST.Unmarshal
import qualified Language.TypeScript.AST as Ts
import Language.TypeScript.Grammar
import qualified System.Path as Path
import Test.Tasty

main :: IO ()
main
= readCorpusFiles (Path.relDir "tree-sitter-typescript/vendor/tree-sitter-typescript/typescript/corpus")
= Path.absDir <$> Ts.getTestCorpusDir
>>= readCorpusFiles'
>>= traverse (testCorpus parse)
>>= defaultMain . tests
where parse = parseByteString @Ts.Program @() tree_sitter_typescript
@ -1 +0,0 @@
Subproject commit 40320d8e0953db5e173e6dfbb596d375a085ca65
@ -59,7 +59,7 @@ common dependencies
, fused-effects-exceptions ^>= 1
, fused-effects-resumable ^>= 0.1
, hashable >= 1.2.7 && < 1.4
, tree-sitter ^>= 0.9.0.0
, tree-sitter ^>= 0.9.0.1
, mtl ^>= 2.2.2
, network ^>= 2.8.0.0
, pathtype ^>= 0.8.1
@ -69,7 +69,7 @@ common dependencies
, safe-exceptions ^>= 0.1.7.0
, semantic-analysis ^>= 0
, semantic-ast
, semantic-source ^>= 0.0.2
, semantic-source ^>= 0.1.0
, semilattices ^>= 0.0.0.3
, streaming ^>= 0.2.2.0
, text ^>= 1.2.3.1
@ -119,6 +119,7 @@ library
, Control.Effect.Parse
, Control.Effect.REPL
, Control.Effect.Sum.Project
, Control.Effect.Timeout
-- Datatypes for abstract interpretation
, Data.Abstract.Address.Hole
, Data.Abstract.Address.Monovariant
@ -180,9 +181,6 @@ library
, Diffing.Algorithm.SES
, Diffing.Interpreter
-- Language-specific grammar/syntax types, & assignments
, Language.Markdown.Assignment
, Language.Markdown.Syntax
, Language.Markdown.Term
, Language.Go.Assignment
, Language.Go.Syntax
, Language.Go.Term
@ -209,7 +207,6 @@ library
, Language.Python.Term
, Numeric.Exts
-- Parser glue
, Parsing.CMark
, Parsing.Parser
, Parsing.TreeSitter
-- Rendering formats
@ -226,6 +223,7 @@ library
, Semantic.Api.Bridge
, Semantic.Api.Diffs
, Semantic.Api.LegacyTypes
, Semantic.Api.StackGraph
, Semantic.Api.Symbols
, Semantic.Api.Terms
, Semantic.Api.TOCSummaries
@ -243,7 +241,6 @@ library
, Semantic.Telemetry.Error
, Semantic.Telemetry.Log
, Semantic.Telemetry.Stat
, Semantic.Timeout
, Semantic.Util
, Semantic.Util.Pretty
, Semantic.Version
@ -260,7 +257,6 @@ library
, ansi-terminal >= 0.8.2 && <1
, array ^>= 0.5.3.0
, attoparsec ^>= 0.13.2.2
, cmark-gfm == 0.1.8
, deepseq ^>= 1.4.4.0
, directory-tree ^>= 0.12.1
, filepath ^>= 1.4.2.1
@ -132,9 +132,8 @@ graphingModules recur m = do
where
-- NB: path is null for Languages like Ruby that have module imports that require concrete value semantics.
includeModule path
= let path' = if Prelude.null path then "unknown, concrete semantics required" else path
info = moduleInfo m
in moduleInclusion (moduleVertex (ModuleInfo path' (moduleLanguage info) (moduleOid info)))
= let info = moduleInfo m
in moduleInclusion (moduleVertex (ModuleInfo path (moduleLanguage info) (moduleOid info)))

-- | Add vertices to the graph for imported modules.
graphingModuleInfo :: ( Has (Reader ModuleInfo) sig m
@ -28,15 +28,12 @@ import qualified Data.Error as Error
import Data.Flag
import Data.Foldable (toList)
import Data.Language as Language
import Data.List.NonEmpty (nonEmpty)
import Data.Semigroup (sconcat)
import Data.Sum
import qualified Data.Syntax as Syntax
import qualified Data.Syntax.Declaration as Declaration
import Data.Term
import Data.Text (Text)
import qualified Data.Text as T
import qualified Language.Markdown.Syntax as Markdown
import Source.Loc as Loc
import Source.Range
import Source.Source as Source
@ -110,16 +107,6 @@ class HasDeclarationBy (strategy :: Strategy) syntax where
instance HasDeclarationBy 'Default syntax where
toDeclarationBy _ _ _ = Nothing


-- | Produce a 'Heading' from the first line of the heading of a 'Markdown.Heading' node.
instance HasDeclarationBy 'Custom Markdown.Heading where
toDeclarationBy blob@Blob{..} ann (Markdown.Heading level terms _)
= Just $ Declaration (Heading level) (headingText terms) (Loc.span ann) (blobLanguage blob)
where headingText terms = getSource $ maybe (byteRange ann) sconcat (nonEmpty (headingByteRange <$> toList terms))
headingByteRange (t, _) = byteRange (termAnnotation t)
getSource = firstLine . toText . Source.slice blobSource
firstLine = T.takeWhile (/= '\n')

-- | Produce an 'Error' for 'Syntax.Error' nodes.
instance HasDeclarationBy 'Custom Syntax.Error where
toDeclarationBy blob@Blob{..} ann err@Syntax.Error{}
@ -167,7 +154,6 @@ data Strategy = Default | Custom
type family DeclarationStrategy syntax where
DeclarationStrategy Declaration.Function = 'Custom
DeclarationStrategy Declaration.Method = 'Custom
DeclarationStrategy Markdown.Heading = 'Custom
DeclarationStrategy Syntax.Error = 'Custom
DeclarationStrategy (Sum _) = 'Custom
DeclarationStrategy _ = 'Default
@ -77,7 +77,6 @@ module Assigning.Assignment
, MonadError(..)
, MonadFail(..)
, location
, currentNode
, symbol
, rawSource
, source
@ -111,7 +110,6 @@ import Data.ByteString (ByteString)
import Data.Error
import Data.Foldable
import Data.Function
import Data.Functor.Classes
import Data.Ix
import Data.List.NonEmpty (NonEmpty (..), nonEmpty)
import Data.Maybe
@ -131,79 +129,74 @@ import TreeSitter.Language
-- | Assignment from an AST with some set of 'symbol's onto some other value.
--
-- This is essentially a parser.
type Assignment ast grammar = Freer (Tracing (AssignmentF ast grammar))
type Assignment grammar = Freer (Tracing (AssignmentF grammar))

data AssignmentF ast grammar a where
End :: AssignmentF ast grammar ()
Loc :: AssignmentF ast grammar L.Loc
CurrentNode :: AssignmentF ast grammar (TermF ast (Node grammar) ())
Source :: AssignmentF ast grammar ByteString
Children :: Assignment ast grammar a -> AssignmentF ast grammar a
Choose :: Table.Table grammar (Assignment ast grammar a) -> Maybe (Assignment ast grammar a) -> Maybe (Error (Either String grammar) -> Assignment ast grammar a) -> AssignmentF ast grammar a
Many :: Assignment ast grammar a -> AssignmentF ast grammar [a]
Alt :: [a] -> AssignmentF ast grammar a
Label :: Assignment ast grammar a -> String -> AssignmentF ast grammar a
Fail :: String -> AssignmentF ast grammar a
GetLocals :: AssignmentF ast grammar [Text]
PutLocals :: [Text] -> AssignmentF ast grammar ()
data AssignmentF grammar a where
End :: AssignmentF grammar ()
Loc :: AssignmentF grammar L.Loc
Source :: AssignmentF grammar ByteString
Children :: Assignment grammar a -> AssignmentF grammar a
Choose :: Table.Table grammar (Assignment grammar a) -> Maybe (Assignment grammar a) -> Maybe (Error (Either String grammar) -> Assignment grammar a) -> AssignmentF grammar a
Many :: Assignment grammar a -> AssignmentF grammar [a]
Alt :: [a] -> AssignmentF grammar a
Label :: Assignment grammar a -> String -> AssignmentF grammar a
Fail :: String -> AssignmentF grammar a
GetLocals :: AssignmentF grammar [Text]
PutLocals :: [Text] -> AssignmentF grammar ()

data Tracing f a where
Tracing :: { tracingCallSite :: Maybe (String, SrcLoc), runTracing :: f a } -> Tracing f a

assignmentCallSite :: Assignment ast grammar a -> Maybe (String, SrcLoc)
assignmentCallSite :: Assignment grammar a -> Maybe (String, SrcLoc)
assignmentCallSite (Tracing site _ `Then` _) = site
assignmentCallSite _ = Nothing

tracing :: HasCallStack => f a -> Tracing f a
tracing f = case getCallStack callStack of
(_ : site : _) -> Tracing (Just site) f
_ -> Tracing Nothing f
_ : site : _ -> Tracing (Just site) f
_ -> Tracing Nothing f

-- | Zero-width production of the current location.
--
-- If assigning at the end of input or at the end of a list of children, the location will be returned as an empty Range and Span at the current offset. Otherwise, it will be the Range and Span of the current node.
location :: Assignment ast grammar L.Loc
location :: Assignment grammar L.Loc
location = tracing Loc `Then` pure

getLocals :: HasCallStack => Assignment ast grammar [Text]
getLocals :: HasCallStack => Assignment grammar [Text]
getLocals = tracing GetLocals `Then` pure

putLocals :: HasCallStack => [Text] -> Assignment ast grammar ()
putLocals :: HasCallStack => [Text] -> Assignment grammar ()
putLocals l = tracing (PutLocals l) `Then` pure

-- | Zero-width production of the current node.
currentNode :: HasCallStack => Assignment ast grammar (TermF ast (Node grammar) ())
currentNode = tracing CurrentNode `Then` pure

-- | Zero-width match of a node with the given symbol, producing the current node’s location.
symbol :: (Enum grammar, HasCallStack) => grammar -> Assignment ast grammar L.Loc
symbol :: (Enum grammar, HasCallStack) => grammar -> Assignment grammar L.Loc
symbol s = tracing (Choose (Table.singleton s location) Nothing Nothing) `Then` pure

-- | A rule to produce a node’s source as a ByteString.
-- You probably want to use 'source', unless you're throwing away the result.
rawSource :: HasCallStack => Assignment ast grammar ByteString
rawSource :: HasCallStack => Assignment grammar ByteString
rawSource = tracing Source `Then` pure

-- | A rule to produce a node's source as Text. Fails if the node's source can't be parsed as UTF-8.
source :: HasCallStack => Assignment ast grammar Text
source :: HasCallStack => Assignment grammar Text
source = fmap decodeUtf8' rawSource >>= either (\e -> fail ("UTF-8 decoding failed: " <> show e)) pure

-- | Match a node by applying an assignment to its children.
children :: HasCallStack => Assignment ast grammar a -> Assignment ast grammar a
children :: HasCallStack => Assignment grammar a -> Assignment grammar a
children child = tracing (Children child) `Then` pure

-- | Advance past the current node.
advance :: HasCallStack => Assignment ast grammar ()
advance :: HasCallStack => Assignment grammar ()
advance = () <$ source

-- | Construct a committed choice table from a list of alternatives. Use this to efficiently select between long lists of rules.
choice :: (Enum grammar, Eq1 ast, Ix grammar, HasCallStack) => [Assignment ast grammar a] -> Assignment ast grammar a
choice :: (Enum grammar, Ix grammar, HasCallStack) => [Assignment grammar a] -> Assignment grammar a
choice [] = empty
choice alternatives
| null choices = asum alternatives
| otherwise = tracing (Choose (Table.fromListWith (<|>) choices) ((`Then` id) . tracing . Alt . toList <$> nonEmpty atEnd) (mergeHandlers handlers)) `Then` pure
where (choices, atEnd, handlers) = foldMap toChoices alternatives
toChoices :: (Enum grammar, Ix grammar) => Assignment ast grammar a -> ([(grammar, Assignment ast grammar a)], [Assignment ast grammar a], [Error (Either String grammar) -> Assignment ast grammar a])
toChoices :: (Enum grammar, Ix grammar) => Assignment grammar a -> ([(grammar, Assignment grammar a)], [Assignment grammar a], [Error (Either String grammar) -> Assignment grammar a])
toChoices rule = case rule of
Tracing _ (Choose t a h) `Then` continue -> (Table.toPairs (fmap (>>= continue) t), toList ((>>= continue) <$> a), toList ((continue <=<) <$> h))
Tracing _ (Many child) `Then` _ -> let (c, _, _) = toChoices child in (fmap (rule <$) c, [rule], [])
@ -215,7 +208,7 @@ choice alternatives
mergeHandlers hs = Just (\ err -> asum (hs <*> [err]))

-- | Match and advance past a node with the given symbol.
token :: (Enum grammar, HasCallStack) => grammar -> Assignment ast grammar L.Loc
token :: (Enum grammar, HasCallStack) => grammar -> Assignment grammar L.Loc
token s = symbol s <* advance


@ -229,7 +222,7 @@ nodeError :: CallStack -> [Either String grammar] -> Node grammar -> Error (Eith
nodeError cs expected n@Node{..} = Error (nodeSpan n) expected (Just (Right nodeSymbol)) cs


firstSet :: (Enum grammar, Ix grammar) => Assignment ast grammar a -> [grammar]
firstSet :: (Enum grammar, Ix grammar) => Assignment grammar a -> [grammar]
firstSet = iterFreer (\ _ (Tracing _ assignment) -> case assignment of
Choose table _ _ -> Table.tableAddresses table
Label child _ -> firstSet child
@ -237,34 +230,35 @@ firstSet = iterFreer (\ _ (Tracing _ assignment) -> case assignment of


-- | Run an assignment over an AST exhaustively.
assign :: (Symbol grammar, Eq1 ast, Foldable ast, Functor ast)
=> Source.Source -- ^ The source for the parse tree.
-> Assignment ast grammar a -- ^ The 'Assignment to run.
-> AST ast grammar -- ^ The root of the ast.
-> Either (Error String) a -- ^ 'Either' an 'Error' or an assigned value.
assign :: Symbol grammar
=> Source.Source -- ^ The source for the parse tree.
-> Assignment grammar a -- ^ The 'Assignment to run.
-> AST grammar -- ^ The root of the ast.
-> Either (Error String) a -- ^ 'Either' an 'Error' or an assigned value.
assign source assignment ast = bimap (fmap (either id show)) fst (runAssignment source assignment (makeState [ast]))
{-# INLINE assign #-}

-- | Run an assignment of nodes in a grammar onto terms in a syntax over an AST exhaustively.
runAssignment :: forall grammar a ast. (Symbol grammar, Eq1 ast, Foldable ast, Functor ast)
=> Source.Source -- ^ The source for the parse tree.
-> Assignment ast grammar a -- ^ The 'Assignment' to run.
-> State ast grammar -- ^ The current state.
-> Either (Error (Either String grammar)) (a, State ast grammar) -- ^ 'Either' an 'Error' or an assigned value & updated state.
runAssignment :: forall grammar a . (Symbol grammar)
=> Source.Source -- ^ The source for the parse tree.
-> Assignment grammar a -- ^ The 'Assignment' to run.
-> State grammar -- ^ The current state.
-> Either (Error (Either String grammar)) (a, State grammar) -- ^ 'Either' an 'Error' or an assigned value & updated state.
runAssignment source = \ assignment state -> go assignment state >>= requireExhaustive (assignmentCallSite assignment)
-- Note: We explicitly bind source above in order to ensure that the where clause can close over them; they don’t change through the course of the run, so holding one reference is sufficient. On the other hand, we don’t want to accidentally capture the assignment and state in the where clause, since they change at every step—and capturing when you meant to shadow is an easy mistake to make, & results in hard-to-debug errors. Binding them in a lambda avoids that problem while also being easier to follow than a pointfree definition.
where go :: Assignment ast grammar result -> State ast grammar -> Either (Error (Either String grammar)) (result, State ast grammar)
where go :: Assignment grammar result -> State grammar -> Either (Error (Either String grammar)) (result, State grammar)
go assignment = iterFreer run ((pure .) . (,) <$> assignment)
{-# INLINE go #-}

run :: (x -> State ast grammar -> Either (Error (Either String grammar)) (result, State ast grammar))
-> Tracing (AssignmentF ast grammar) x
-> State ast grammar
-> Either (Error (Either String grammar)) (result, State ast grammar)
run :: (x -> State grammar -> Either (Error (Either String grammar)) (result, State grammar))
-> Tracing (AssignmentF grammar) x
-> State grammar
-> Either (Error (Either String grammar)) (result, State grammar)
run yield t initialState = state `seq` maybe (anywhere Nothing) atNode (listToMaybe stateNodes)
where atNode (Term (In node f)) = case runTracing t of
Loc -> yield (nodeLocation node) state
CurrentNode -> yield (In node (() <$ f)) state
GetLocals -> yield stateLocals state
PutLocals l -> yield () (state { stateLocals = l })
Source -> yield (Source.bytes (Source.slice source (nodeByteRange node))) (advanceState state)
Children child -> do
(a, state') <- go child state { stateNodes = toList f, stateCallSites = maybe id (:) (tracingCallSite t) stateCallSites } >>= requireExhaustive (tracingCallSite t)
@ -293,7 +287,7 @@ runAssignment source = \ assignment state -> go assignment state >>= requireExha
(Error (Span statePos statePos) (fmap Right expectedSymbols) Nothing assignmentStack)
(nodeError assignmentStack (fmap Right expectedSymbols))

requireExhaustive :: Symbol grammar => Maybe (String, SrcLoc) -> (result, State ast grammar) -> Either (Error (Either String grammar)) (result, State ast grammar)
requireExhaustive :: Symbol grammar => Maybe (String, SrcLoc) -> (result, State grammar) -> Either (Error (Either String grammar)) (result, State grammar)
requireExhaustive callSite (a, state) =
let state' = skipTokens state
stack = fromCallSiteList (maybe id (:) callSite (stateCallSites state))
@ -301,42 +295,40 @@ requireExhaustive callSite (a, state) =
[] -> Right (a, state')
Term (In node _) : _ -> Left (nodeError stack [] node)

skipTokens :: Symbol grammar => State ast grammar -> State ast grammar
skipTokens :: Symbol grammar => State grammar -> State grammar
skipTokens state = state { stateNodes = dropWhile ((/= Regular) . symbolType . nodeSymbol . termAnnotation) (stateNodes state) }

-- | Advances the state past the current (head) node (if any), dropping it off stateNodes, and updating stateOffset & statePos to its end; or else returns the state unchanged.
advanceState :: State ast grammar -> State ast grammar
advanceState :: State grammar -> State grammar
advanceState state@State{..}
| Term (In node _) : rest <- stateNodes = State (Range.end (nodeByteRange node)) (Span.end (nodeSpan node)) stateCallSites rest stateLocals
| otherwise = state

-- | State kept while running 'Assignment's.
data State ast grammar = State
data State grammar = State
{ stateOffset :: {-# UNPACK #-} !Int -- ^ The offset into the Source thus far reached, measured in bytes.
, statePos :: {-# UNPACK #-} !Pos -- ^ The (1-indexed) line/column position in the Source thus far reached.
, stateCallSites :: ![(String, SrcLoc)] -- ^ The symbols & source locations of the calls thus far.
, stateNodes :: ![AST ast grammar] -- ^ The remaining nodes to assign. Note that 'children' rules recur into subterms, and thus this does not necessarily reflect all of the terms remaining to be assigned in the overall algorithm, only those “in scope.”
, stateNodes :: ![AST grammar] -- ^ The remaining nodes to assign. Note that 'children' rules recur into subterms, and thus this does not necessarily reflect all of the terms remaining to be assigned in the overall algorithm, only those “in scope.”
, stateLocals :: ![Text] -- Special state necessary for the Ruby assignment. When we refactor Assignment to use effects we should pull this out into Language.Ruby.Assignment.
}
deriving (Eq, Show)

deriving instance (Eq grammar, Eq1 ast) => Eq (State ast grammar)
deriving instance (Show grammar, Show1 ast) => Show (State ast grammar)

makeState :: [AST ast grammar] -> State ast grammar
makeState :: [AST grammar] -> State grammar
makeState ns = State 0 (Pos 1 1) [] ns []


-- Instances

instance (Enum grammar, Eq1 ast, Ix grammar) => Alternative (Assignment ast grammar) where
empty :: HasCallStack => Assignment ast grammar a
instance (Enum grammar, Ix grammar) => Alternative (Assignment grammar) where
empty :: HasCallStack => Assignment grammar a
empty = tracing (Alt []) `Then` pure

(<|>) :: forall a. Assignment ast grammar a -> Assignment ast grammar a -> Assignment ast grammar a
(<|>) :: forall a. Assignment grammar a -> Assignment grammar a -> Assignment grammar a
Return a <|> _ = Return a
l@(Tracing cs _ `Then` _) <|> r@Return{} = Tracing cs (Alt [l, r]) `Then` id
l@(Tracing callSiteL la `Then` continueL) <|> r@(Tracing callSiteR ra `Then` continueR) = go callSiteL la continueL callSiteR ra continueR
where go :: forall l r . Maybe (String, SrcLoc) -> AssignmentF ast grammar l -> (l -> Assignment ast grammar a) -> Maybe (String, SrcLoc) -> AssignmentF ast grammar r -> (r -> Assignment ast grammar a) -> Assignment ast grammar a
where go :: forall l r . Maybe (String, SrcLoc) -> AssignmentF grammar l -> (l -> Assignment grammar a) -> Maybe (String, SrcLoc) -> AssignmentF grammar r -> (r -> Assignment grammar a) -> Assignment grammar a
go callSiteL la continueL callSiteR ra continueR = case (la, ra) of
(Fail _, _) -> r
(Alt [], _) -> r
@ -345,34 +337,34 @@ instance (Enum grammar, Eq1 ast, Ix grammar) => Alternative (Assignment ast gram
(Alt ls, _) -> rebuild (Alt ((continueL <$> ls) <> pure r)) id
(_, Alt rs) -> rebuild (Alt (pure l <> (continueR <$> rs))) id
_ -> rebuild (Alt [l, r]) id
where alternate :: AssignmentF ast grammar (Either l r) -> Assignment ast grammar a
where alternate :: AssignmentF grammar (Either l r) -> Assignment grammar a
alternate a = rebuild a (either continueL continueR)
rebuild :: AssignmentF ast grammar x -> (x -> Assignment ast grammar a) -> Assignment ast grammar a
rebuild :: AssignmentF grammar x -> (x -> Assignment grammar a) -> Assignment grammar a
rebuild a c = Tracing (callSiteL <|> callSiteR) a `Then` c

many :: HasCallStack => Assignment ast grammar a -> Assignment ast grammar [a]
many :: HasCallStack => Assignment grammar a -> Assignment grammar [a]
many a = tracing (Many a) `Then` pure

instance MonadFail (Assignment ast grammar) where
fail :: HasCallStack => String -> Assignment ast grammar a
instance MonadFail (Assignment grammar) where
fail :: HasCallStack => String -> Assignment grammar a
fail s = tracing (Fail s) `Then` pure

instance (Enum grammar, Eq1 ast, Ix grammar, Show grammar) => Parsing (Assignment ast grammar) where
instance (Enum grammar, Ix grammar, Show grammar) => Parsing (Assignment grammar) where
try = id

(<?>) :: HasCallStack => Assignment ast grammar a -> String -> Assignment ast grammar a
(<?>) :: HasCallStack => Assignment grammar a -> String -> Assignment grammar a
a <?> s = tracing (Label a s) `Then` pure

unexpected :: String -> Assignment ast grammar a
unexpected :: String -> Assignment grammar a
unexpected = fail

eof :: HasCallStack => Assignment ast grammar ()
eof :: HasCallStack => Assignment grammar ()
eof = tracing End `Then` pure

notFollowedBy :: Show a => Assignment ast grammar a -> Assignment ast grammar ()
notFollowedBy :: Show a => Assignment grammar a -> Assignment grammar ()
notFollowedBy a = (a >>= unexpected . show) <|> pure ()

instance (Enum grammar, Eq1 ast, Ix grammar, Show grammar) => MonadError (Error (Either String grammar)) (Assignment ast grammar) where
instance (Enum grammar, Ix grammar, Show grammar) => MonadError (Error (Either String grammar)) (Assignment grammar) where
throwError err = fail (show err)

catchError rule handler = iterFreer (\ continue (Tracing cs assignment) -> case assignment of
@ -77,7 +77,7 @@ import Data.Semilattice.Lower
import Data.Set (Set)
import GHC.Generics (Generic1)
import GHC.Stack
import Source.Span (Span)
import Source.Span (Pos (..), Span, point)


-- | Evaluates an action locally the scope and frame of the given frame address.
@ -191,7 +191,7 @@ define :: ( HasCallStack
-> Evaluator term address value m ()
define declaration rel accessControl def = withCurrentCallStack callStack $ do
-- TODO: This span is still wrong.
declare declaration rel accessControl lowerBound Unknown Nothing
declare declaration rel accessControl (point (Pos 1 1)) Unknown Nothing
slot <- lookupSlot declaration
value <- def
assign slot value
@ -40,12 +40,11 @@ import Control.Monad.IO.Class
|
||||
import Data.Foldable
|
||||
import Data.Functor.Classes
|
||||
import Data.Maybe.Exts
|
||||
import Data.Semilattice.Lower
|
||||
import Data.Set (Set)
|
||||
import qualified Data.Set as Set
|
||||
import GHC.Generics (Generic1)
|
||||
import Source.Span
|
||||
import System.FilePath.Posix (takeDirectory)
|
||||
import qualified System.Path as Path
|
||||
|
||||
import Control.Abstract.Evaluator
|
||||
import Data.Abstract.BaseError
|
||||
@ -116,8 +115,8 @@ instance ( Has (Reader (ModuleTable (Module (ModuleResult address value)))) sig
case op of
Load name k -> askModuleTable >>= maybeM (throwLoadError (ModuleNotFoundError name)) . fmap moduleBody . ModuleTable.lookup name >>= k
Lookup path k -> askModuleTable >>= k . fmap moduleBody . ModuleTable.lookup path
Resolve names k -> k (find (`Set.member` paths) names)
List dir k -> k (filter ((dir ==) . takeDirectory) (toList paths))
Resolve names k -> k (find (`Set.member` paths) (map Path.absRel names))
List dir k -> k (filter ((dir ==) . Path.toString . Path.takeDirectory) (toList paths))
alg (R other) = ModulesC (alg (R (handleCoercible other)))

askModuleTable :: Has (Reader (ModuleTable (Module (ModuleResult address value)))) sig m => m (ModuleTable (Module (ModuleResult address value)))
@ -147,7 +146,7 @@ runLoadErrorWith f = raiseHandler $ With.runResumable (runEvaluator . f)
throwLoadError :: Has (Resumable (BaseError (LoadError address value))) sig m
=> LoadError address value resume
-> m resume
throwLoadError err@(ModuleNotFoundError name) = throwResumable $ BaseError (ModuleInfo name "Unknown" mempty) lowerBound err
throwLoadError err@(ModuleNotFoundError name) = throwResumable $ BaseError (ModuleInfo name "Unknown" mempty) (point (Pos 1 1)) err
-- TODO: Might be able to get rest of ModuleInfo from the env ^.

@ -18,9 +18,9 @@ import Data.Abstract.BaseError
import qualified Data.Abstract.ScopeGraph as ScopeGraph
import Data.Map.Strict as Map
import Data.Maybe
import Data.Semilattice.Lower
import Data.Traversable
import GHC.Stack
import Source.Span (Pos (..), point)

defineBuiltIn :: ( HasCallStack
, Has (Deref value) sig m
@ -47,11 +47,11 @@ defineBuiltIn declaration rel accessControl value = withCurrentCallStack callSta
let lexicalEdges = Map.singleton Lexical [ currentScope' ]
associatedScope <- newPreludeScope lexicalEdges
-- TODO: This span is still wrong.
declare declaration rel accessControl lowerBound ScopeGraph.Unknown (Just associatedScope)
declare declaration rel accessControl (point (Pos 1 1)) ScopeGraph.Unknown (Just associatedScope)

withScope associatedScope $ do
param <- gensym
declare (Declaration param) ScopeGraph.Gensym accessControl lowerBound ScopeGraph.Unknown Nothing
declare (Declaration param) ScopeGraph.Gensym accessControl (point (Pos 1 1)) ScopeGraph.Unknown Nothing

slot <- lookupSlot declaration
value <- builtIn associatedScope value

@ -11,7 +11,7 @@ module Control.Carrier.Parse.Measured
( -- * Parse carrier
ParseC(..)
-- * Exceptions
, ParserCancelled(..)
, AssignmentTimedOut(..)
-- * Parse effect
, module Control.Effect.Parse
) where
@ -19,8 +19,10 @@ module Control.Carrier.Parse.Measured
import qualified Assigning.Assignment as Assignment
import Control.Algebra
import Control.Effect.Error
import Control.Effect.Lift
import Control.Effect.Parse
import Control.Effect.Reader
import Control.Effect.Timeout
import Control.Effect.Trace
import Control.Exception
import Control.Monad
@ -30,13 +32,11 @@ import qualified Data.Error as Error
import qualified Data.Flag as Flag
import Data.Foldable
import qualified Data.Syntax as Syntax
import Parsing.CMark
import Parsing.Parser
import Parsing.TreeSitter
import Semantic.Config
import Semantic.Task (TaskSession (..))
import Semantic.Telemetry
import Semantic.Timeout
import Source.Source (Source)

newtype ParseC m a = ParseC { runParse :: m a }
@ -45,8 +45,8 @@ newtype ParseC m a = ParseC { runParse :: m a }
instance ( Has (Error SomeException) sig m
, Has (Reader TaskSession) sig m
, Has Telemetry sig m
, Has Timeout sig m
, Has Trace sig m
, Has (Lift IO) sig m
, MonadIO m
)
=> Algebra (Parse :+: sig) (ParseC m) where
@ -54,10 +54,17 @@ instance ( Has (Error SomeException) sig m
alg (R other) = ParseC (alg (handleCoercible other))

-- | Parse a 'Blob' in 'IO'.
runParser :: (Has (Error SomeException) sig m, Has (Reader TaskSession) sig m, Has Telemetry sig m, Has Timeout sig m, Has Trace sig m, MonadIO m)
=> Blob
-> Parser term
-> m term
runParser ::
( Has (Error SomeException) sig m
, Has (Reader TaskSession) sig m
, Has Telemetry sig m
, Has (Lift IO) sig m
, Has Trace sig m
, MonadIO m
)
=> Blob
-> Parser term
-> m term
runParser blob@Blob{..} parser = case parser of
ASTParser language ->
time "parse.tree_sitter_ast_parse" languageTag $ do
@ -71,10 +78,6 @@ runParser blob@Blob{..} parser = case parser of

AssignmentParser parser assignment -> runAssignment Assignment.assign parser blob assignment

MarkdownParser ->
time "parse.cmark_parse" languageTag $
let term = cmarkParser blobSource
in length term `seq` pure term
where
languageTag = [("language" :: String, show (blobLanguage blob))]
executeParserAction act = do
@ -83,10 +86,10 @@ runParser blob@Blob{..} parser = case parser of
when shouldFailFlag (throwError (SomeException AssignmentTimedOut))
act >>= either (\e -> trace (displayException e) *> throwError (SomeException e)) pure

data ParserCancelled = ParserTimedOut | AssignmentTimedOut
deriving (Show)
data AssignmentTimedOut = AssignmentTimedOut deriving (Show)

instance Exception AssignmentTimedOut

instance Exception ParserCancelled


runAssignment
@ -95,8 +98,8 @@ runAssignment
, Has (Error SomeException) sig m
, Has (Reader TaskSession) sig m
, Has Telemetry sig m
, Has Timeout sig m
, Has Trace sig m
, Has (Lift IO) sig m
, MonadIO m
)
=> (Source -> assignment (term Assignment.Loc) -> ast -> Either (Error.Error String) (term Assignment.Loc))
@ -109,7 +112,7 @@ runAssignment assign parser blob@Blob{..} assignment = do
let requestID' = ("github_request_id", requestID taskSession)
let isPublic' = ("github_is_public", show (isPublic taskSession))
let logPrintFlag = configLogPrintSource . config $ taskSession
let blobFields = ("path", if isPublic taskSession || Flag.toBool LogPrintSource logPrintFlag then blobPath blob else "<filtered>")
let blobFields = ("path", if isPublic taskSession || Flag.toBool LogPrintSource logPrintFlag then blobFilePath blob else "<filtered>")
let logFields = requestID' : isPublic' : blobFields : languageTag
let shouldFailForTesting = configFailParsingForTesting $ config taskSession
let shouldFailOnParsing = optionsFailOnParseError . configOptions $ config taskSession
@ -1,5 +1,11 @@
{-# LANGUAGE FlexibleContexts, FlexibleInstances, GADTs, GeneralizedNewtypeDeriving, MultiParamTypeClasses,
RecordWildCards, TypeOperators, UndecidableInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
-- | A carrier for 'Parse' effects suitable for use in the repl, tests, etc.
module Control.Carrier.Parse.Simple
( -- * Parse carrier
@ -19,7 +25,6 @@ import Control.Effect.Parse
import Control.Exception
import Control.Monad.IO.Class
import Data.Blob
import Parsing.CMark
import Parsing.Parser
import Parsing.TreeSitter

@ -57,10 +62,6 @@ runParser timeout blob@Blob{..} parser = case parser of
AssignmentParser parser assignment ->
runParser timeout blob parser >>= either (throwError . toException) pure . Assignment.assign blobSource assignment

MarkdownParser ->
let term = cmarkParser blobSource
in length term `seq` pure term

newtype ParseFailure = ParseFailure String
deriving (Show)

23
src/Control/Effect/Timeout.hs
Normal file
23
src/Control/Effect/Timeout.hs
Normal file
@ -0,0 +1,23 @@
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
module Control.Effect.Timeout
( timeout
) where

import Control.Algebra
import Control.Effect.Lift
import Data.Duration
import qualified System.Timeout as System

-- | Run an action with a timeout. Returns 'Nothing' when no result is available
-- within the specified duration. Uses 'System.Timeout.timeout' so all caveats
-- about not operating over FFI boundaries apply.
--
-- Any state changes in the action are discarded if the timeout fails.
timeout :: Has (Lift IO) sig m => Duration -> m a -> m (Maybe a)
timeout n m = liftWith $ \ ctx hdl
-> maybe
-- Restore the old state if it timed out.
(Nothing <$ ctx)
-- Apply it if it succeeded.
(fmap Just) <$> System.timeout (toMicroseconds n) (hdl (m <$ ctx))
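A minimal usage sketch for the new combinator (illustrative only; fromSeconds is assumed to be one of Data.Duration's constructors):

    import Control.Effect.Lift
    import Control.Effect.Timeout
    import Data.Duration (fromSeconds)

    -- Give an action five seconds to produce a result; Nothing means it was cut off.
    withFiveSeconds :: Has (Lift IO) sig m => m a -> m (Maybe a)
    withFiveSeconds = timeout (fromSeconds 5)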
@ -13,7 +13,7 @@ import Data.JSON.Fields
import Source.Loc as Loc

-- | An AST node labelled with symbols and source location.
type AST syntax grammar = Term syntax (Node grammar)
type AST grammar = Term [] (Node grammar)

data Node grammar = Node
{ nodeSymbol :: !grammar

@ -12,13 +12,15 @@ import Control.Abstract.Evaluator
import qualified Data.Abstract.Module as M
import Data.Functor.Classes
import qualified Source.Span as S
import qualified System.Path as Path

data BaseError (exc :: * -> *) resume = BaseError { baseErrorModuleInfo :: ModuleInfo, baseErrorSpan :: Span, baseErrorException :: exc resume }

instance (Show (exc resume)) => Show (BaseError exc resume) where
showsPrec _ BaseError{..} = shows baseErrorException <> showString " " <> showString errorLocation
where errorLocation | startErrorLine == endErrorLine = M.modulePath baseErrorModuleInfo <> " " <> startErrorLine <> ":" <> startErrorCol <> "-" <> endErrorCol
| otherwise = M.modulePath baseErrorModuleInfo <> " " <> startErrorLine <> ":" <> startErrorCol <> "-" <> endErrorLine <> ":" <> endErrorCol
where errorLocation | startErrorLine == endErrorLine = baseModuleFilePath <> " " <> startErrorLine <> ":" <> startErrorCol <> "-" <> endErrorCol
| otherwise = baseModuleFilePath <> " " <> startErrorLine <> ":" <> startErrorCol <> "-" <> endErrorLine <> ":" <> endErrorCol
baseModuleFilePath = Path.toString $ M.modulePath baseErrorModuleInfo
startErrorLine = show $ S.line (S.start baseErrorSpan)
endErrorLine = show $ S.line (S.end baseErrorSpan)
startErrorCol = show $ S.column (S.start baseErrorSpan)
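Given the instance above, an error whose span stays on one line renders as the module path followed by line:col-col; a multi-line span adds the end line. For example (illustrative values, not output from this commit):

    -- shows (BaseError info (Span (Pos 1 3) (Pos 1 7)) err) ""   ~> "... app/main.rb 1:3-7"
    -- shows (BaseError info (Span (Pos 1 3) (Pos 2 7)) err) ""   ~> "... app/main.rb 1:3-2:7"

where info is a ModuleInfo whose modulePath prints as app/main.rb and "..." stands for the Show output of the wrapped exception.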
@ -35,11 +35,10 @@ import Data.Functor.Classes
import Data.List.NonEmpty (nonEmpty)
import Data.Scientific (Scientific)
import Data.Semigroup.Foldable
import Data.Semilattice.Lower
import Data.Sum
import Data.Text
import GHC.Stack
import Source.Span (HasSpan (..))
import Source.Span (HasSpan (..), Pos (..), point)

import Analysis.Name as X
import Control.Abstract hiding (Load, String)
@ -229,7 +228,7 @@ defineSelf :: ( Has (State (ScopeGraph address)) sig m
=> Evaluator term address value m ()
defineSelf = do
let self = Declaration __self
declare self ScopeGraph.Prelude Public lowerBound ScopeGraph.Unknown Nothing
declare self ScopeGraph.Prelude Public (point (Pos 1 1)) ScopeGraph.Unknown Nothing
slot <- lookupSlot self
assign slot =<< object =<< currentFrame

@ -20,7 +20,7 @@ import qualified Data.Map as Map
import Data.Semilattice.Lower
import Data.Set (Set)
import Prelude hiding (lookup)
import System.FilePath.Posix
import qualified System.Path as Path

newtype ModuleTable a = ModuleTable { unModuleTable :: Map.Map ModulePath a }
deriving (Eq, Foldable, Functor, Lower, Monoid, Ord, Semigroup, Traversable)
@ -32,7 +32,7 @@ modulePaths :: ModuleTable a -> Set ModulePath
modulePaths = Map.keysSet . unModuleTable

modulePathsInDir :: FilePath -> ModuleTable a -> [ModulePath]
modulePathsInDir k = filter (\e -> k == takeDirectory e) . Map.keys . unModuleTable
modulePathsInDir k = filter (\e -> Path.absRel k == Path.takeDirectory e) . Map.keys . unModuleTable
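Roughly, the new comparison keeps a module when its parent directory, as a typed path, equals the query directory; for example (paths invented; the Path types come from the pathtype package used throughout this diff):

    -- modulePathsInDir "src/Data" table
    --   keeps   "src/Data/Blob.hs"     (its Path.takeDirectory equals Path.absRel "src/Data")
    --   drops   "src/Data/Blob/IO.hs"  (its parent directory is "src/Data/Blob")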
lookup :: ModulePath -> ModuleTable a -> Maybe a
lookup k = Map.lookup k . unModuleTable

@ -32,12 +32,12 @@ import Data.Bifunctor
import qualified Data.ByteString.Lazy as BL
import Data.Edit
import Data.JSON.Fields
import Data.Maybe
import Data.Maybe.Exts
import Data.Module
import Data.List (stripPrefix)
import GHC.Generics (Generic)
import Source.Language as Language
import qualified System.FilePath as FP
import qualified System.Path as Path


newtype Blobs a = Blobs { blobs :: [a] }
@ -47,10 +47,10 @@ decodeBlobs :: BL.ByteString -> Either String [Blob]
decodeBlobs = fmap blobs <$> eitherDecode

-- | An exception indicating that we’ve tried to diff or parse a blob of unknown language.
newtype NoLanguageForBlob = NoLanguageForBlob FilePath
newtype NoLanguageForBlob = NoLanguageForBlob Path.AbsRelFile
deriving (Eq, Exception, Ord, Show)

noLanguageForBlob :: Has (Error SomeException) sig m => FilePath -> m a
noLanguageForBlob :: Has (Error SomeException) sig m => Path.AbsRelFile -> m a
noLanguageForBlob blobPath = throwError (SomeException (NoLanguageForBlob blobPath))

-- | Construct a 'Module' for a 'Blob' and @term@, relative to some root 'FilePath'.
@ -59,8 +59,16 @@ moduleForBlob :: Maybe FilePath -- ^ The root directory relative to which the mo
-> term -- ^ The @term@ representing the body of the module.
-> Module term -- ^ A 'Module' named appropriate for the 'Blob', holding the @term@, and constructed relative to the root 'FilePath', if any.
moduleForBlob rootDir b = Module info
where root = fromMaybe (FP.takeDirectory (blobPath b)) rootDir
info = ModuleInfo (FP.makeRelative root (blobPath b)) (languageToText (blobLanguage b)) mempty
where root = maybe (Path.takeDirectory $ blobPath b) Path.absRel rootDir
info = ModuleInfo (dropRelative root (blobPath b)) (languageToText (blobLanguage b)) mempty

dropRelative :: Path.AbsRelDir -> Path.AbsRelFile -> Path.AbsRelFile
dropRelative a' b' = case as `stripPrefix` bs of
Just rs | ra == rb -> Path.toAbsRel $ (foldl (Path.</>) Path.currentDir rs) Path.</> bf
_ -> b'
where (ra, as, _) = Path.splitPath $ Path.normalise a'
(rb, bs, _) = Path.splitPath $ Path.normalise $ Path.takeDirectory b'
bf = Path.takeFileName b'
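An illustrative trace of dropRelative (paths invented for the example; the exact rendering depends on the pathtype library's normalisation):

    -- dropRelative (Path.absRel "project/src") (Path.absRel "project/src/App/Main.hs")
    --   strips the shared prefix and yields a file path equivalent to "App/Main.hs";
    -- if the prefix does not match, or one side is absolute and the other relative,
    --   the original file path is returned unchanged.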
-- | Represents a blobs suitable for diffing which can be either a blob to
-- delete, a blob to insert, or a pair of blobs to diff.
@ -80,7 +88,7 @@ languageForBlobPair = mergeEdit combine . bimap blobLanguage blobLanguage where
| a == Unknown || b == Unknown = Unknown
| otherwise = b

pathForBlobPair :: BlobPair -> FilePath
pathForBlobPair :: BlobPair -> Path.AbsRelFile
pathForBlobPair = blobPath . mergeEdit (const id)

languageTagForBlobPair :: BlobPair -> [(String, String)]
@ -88,12 +96,12 @@ languageTagForBlobPair pair = showLanguage (languageForBlobPair pair)
where showLanguage = pure . (,) "language" . show

pathKeyForBlobPair :: BlobPair -> FilePath
pathKeyForBlobPair = mergeEdit combine . bimap blobPath blobPath where
pathKeyForBlobPair = mergeEdit combine . bimap blobFilePath blobFilePath where
combine before after | before == after = after
| otherwise = before <> " -> " <> after

instance ToJSONFields Blob where
toJSONFields p = [ "path" .= blobPath p, "language" .= blobLanguage p]
toJSONFields p = [ "path" .= blobFilePath p, "language" .= blobLanguage p]

decodeBlobPairs :: BL.ByteString -> Either String [BlobPair]
decodeBlobPairs = fmap blobs <$> eitherDecode

@ -5,7 +5,6 @@ module Data.Blob.IO
( readBlobFromFile
, readBlobFromFile'
, readBlobFromPath
, readBlobsFromDir
, readFilePair
, readProjectFromPaths
) where
@ -13,15 +12,14 @@ module Data.Blob.IO
import Analysis.Blob
import Analysis.File as File
import Analysis.Project
import qualified Control.Concurrent.Async as Async
import Control.Monad.IO.Class
import Data.Blob
import qualified Data.ByteString as B
import Data.Language
import Data.Maybe.Exts
import Data.Semilattice.Lower
import Semantic.IO
import qualified Source.Source as Source
import Source.Span
import qualified System.Path as Path

-- | Deprecated: this has very weird semantics.
@ -44,9 +42,9 @@ readProjectFromPaths maybeRoot path lang excludeDirs = do

paths <- liftIO $ findFilesInDir rootDir exts excludeDirs
blobs <- liftIO $ traverse (readBlobFromFile' . toFile) paths
pure $ Project (Path.toString rootDir) blobs lang (fmap Path.toString excludeDirs)
pure $ Project rootDir blobs lang excludeDirs
where
toFile path = File path lowerBound lang
toFile path = File path (point (Pos 1 1)) lang
exts = extensionsForLanguage lang

@ -68,11 +66,6 @@ readBlobFromFile' file = do
readBlobFromPath :: (MonadFail m, MonadIO m) => Path.AbsRelFile -> m Blob
readBlobFromPath = readBlobFromFile' . File.fromPath

-- | Read all blobs in the directory with Language.supportedExts.
readBlobsFromDir :: MonadIO m => Path.AbsRelDir -> m [Blob]
readBlobsFromDir path = liftIO . fmap catMaybes $
findFilesInDir path supportedExts mempty >>= Async.mapConcurrently (readBlobFromFile . File.fromPath)
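A small usage sketch for readBlobsFromDir (the directory literal is invented):

    -- Read every blob with a supported extension under ./src, concurrently:
    readSourceBlobs :: IO [Blob]
    readSourceBlobs = readBlobsFromDir (Path.absRel "src")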
readFilePair :: MonadIO m => File Language -> File Language -> m BlobPair
readFilePair a b = do
before <- readBlobFromFile a

@ -59,7 +59,7 @@ formatError includeSource colourize blob@Blob{..} Error{..}
. (if Flag.toBool LogPrintSource includeSource then showExcerpt colourize errorSpan blob else id)
. showCallStack colourize callStack . showChar '\n'
where
path = Just $ if Flag.toBool LogPrintSource includeSource then blobPath blob else "<filtered>"
path = Just $ if Flag.toBool LogPrintSource includeSource then blobFilePath blob else "<filtered>"

showExcerpt :: Flag Colourize -> Span -> Blob -> ShowS
showExcerpt colourize Span{..} Blob{..}

@ -45,6 +45,7 @@ import GHC.Generics (V1)
import Prelude hiding (span)
import qualified Source.Loc as Loc
import Source.Span
import qualified System.Path as Path

-- | A vertex of representing some node in a control flow graph.
data ControlFlowVertex
@ -60,19 +61,19 @@ packageVertex :: PackageInfo -> ControlFlowVertex
packageVertex (PackageInfo name _) = Package (formatName name)

moduleVertex :: ModuleInfo -> ControlFlowVertex
moduleVertex = Module . T.pack . modulePath
moduleVertex = Module . T.pack . Path.toString . modulePath

unknownModuleVertex :: ModuleInfo -> ControlFlowVertex
unknownModuleVertex = UnknownModule . T.pack . modulePath
unknownModuleVertex = UnknownModule . T.pack . Path.toString . modulePath

variableVertex :: Text -> ModuleInfo -> Span -> ControlFlowVertex
variableVertex name ModuleInfo{..} = Variable name (T.pack modulePath)
variableVertex name ModuleInfo{..} = Variable name (T.pack $ Path.toString modulePath)

methodVertex :: Text -> ModuleInfo -> Span -> ControlFlowVertex
methodVertex name ModuleInfo{..} = Method name (T.pack modulePath)
methodVertex name ModuleInfo{..} = Method name (T.pack $ Path.toString modulePath)

functionVertex :: Text -> ModuleInfo -> Span -> ControlFlowVertex
functionVertex name ModuleInfo{..} = Function name (T.pack modulePath)
functionVertex name ModuleInfo{..} = Function name (T.pack $ Path.toString modulePath)

vertexIdentifier :: ControlFlowVertex -> Text
vertexIdentifier v = case v of

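For illustration, with a made-up ModuleInfo whose path is src/app.rb, the vertex constructors now stringify the typed path before packing it into Text:

    -- moduleVertex (ModuleInfo (Path.absRel "src/app.rb") "Ruby" mempty)  ~>  Module "src/app.rb"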
@ -5,26 +5,9 @@ module Data.Language
, defaultLanguageModes
, preciseLanguageModes
, aLaCarteLanguageModes
, codeNavLanguages
, supportedExts
) where

import qualified Data.Languages as Lingo
import qualified Data.Map.Strict as Map
import qualified Data.Text as T
import Source.Language

codeNavLanguages :: [Language]
codeNavLanguages = [Go, Java, Ruby, Python, JavaScript, TypeScript, PHP]

supportedExts :: [String]
supportedExts = foldr append mempty supportedLanguages
where
append (Just l) b = fmap T.unpack (Lingo.languageExtensions l) <> b
append Nothing b = b
supportedLanguages = fmap lookup (languageToText <$> codeNavLanguages)
lookup k = Map.lookup k Lingo.languages

import Source.Language

data PerLanguageModes = PerLanguageModes
{ pythonMode :: LanguageMode

@ -77,16 +77,16 @@ makeTerm1' syntax = case toList syntax of
_ -> error "makeTerm1': empty structure"

-- | Construct an empty term at the current position.
emptyTerm :: (Empty :< syntaxes, Sum syntaxes ~ Syntax term, Apply Foldable syntaxes, IsTerm term) => Assignment.Assignment ast grammar (term Loc)
emptyTerm :: (Empty :< syntaxes, Sum syntaxes ~ Syntax term, Apply Foldable syntaxes, IsTerm term) => Assignment.Assignment grammar (term Loc)
emptyTerm = makeTerm . startLocation <$> Assignment.location <*> pure Empty
where startLocation Loc{..} = Loc (Range.point (Range.start byteRange)) (Span.point (Span.start span))

-- | Catch assignment errors into an error term.
handleError :: (HasCallStack, Error :< syntaxes, Sum syntaxes ~ Syntax term, Enum grammar, Eq1 ast, Ix grammar, Show grammar, Apply Foldable syntaxes, IsTerm term) => Assignment.Assignment ast grammar (term Loc) -> Assignment.Assignment ast grammar (term Loc)
handleError :: (HasCallStack, Error :< syntaxes, Sum syntaxes ~ Syntax term, Enum grammar, Ix grammar, Show grammar, Apply Foldable syntaxes, IsTerm term) => Assignment.Assignment grammar (term Loc) -> Assignment.Assignment grammar (term Loc)
handleError = flip Assignment.catchError (\ err -> makeTerm <$> Assignment.location <*> pure (errorSyntax (either id show <$> err) []) <* Assignment.source)

-- | Catch parse errors into an error term.
parseError :: (Error :< syntaxes, Sum syntaxes ~ Syntax term, Bounded grammar, Enum grammar, Apply Foldable syntaxes, IsTerm term) => Assignment.Assignment ast grammar (term Loc)
parseError :: (Error :< syntaxes, Sum syntaxes ~ Syntax term, Bounded grammar, Enum grammar, Apply Foldable syntaxes, IsTerm term) => Assignment.Assignment grammar (term Loc)
parseError = makeTerm <$> Assignment.token maxBound <*> pure (Error (ErrorStack $ errorSite <$> getCallStack (freezeCallStack callStack)) [] (Just "ParseError") [])

-- | Match context terms before a subject term, wrapping both up in a Context term if any context terms matched, or otherwise returning the subject term.

@ -18,7 +18,6 @@ import Data.Functor.Classes.Generic
import Data.Hashable.Lifted
import qualified Data.Map.Strict as Map
import Data.Maybe.Exts
import Data.Semilattice.Lower
import qualified Data.Set as Set
import Data.Traversable
import GHC.Generics (Generic1)
@ -106,7 +105,7 @@ instance Evaluatable Method where

params <- withScope associatedScope $ do
-- TODO: Should we give `self` a special Relation?
declare (Declaration __self) ScopeGraph.Prelude ScopeGraph.Public lowerBound ScopeGraph.Unknown Nothing
declare (Declaration __self) ScopeGraph.Prelude ScopeGraph.Public (point (Pos 1 1)) ScopeGraph.Unknown Nothing
for methodParameters $ \paramNode -> declareMaybeName (declaredName paramNode) Default ScopeGraph.Public (paramNode^.span_) ScopeGraph.Parameter Nothing

addr <- lookupSlot (Declaration name)

@ -15,6 +15,7 @@ import qualified Data.Text as T
import Diffing.Algorithm
import GHC.Generics (Generic1)
import Source.Span
import qualified System.Path as Path

-- A file directive like the Ruby constant `__FILE__`.
data File a = File
@ -25,7 +26,7 @@ instance Ord1 File where liftCompare = genericLiftCompare
instance Show1 File where liftShowsPrec = genericLiftShowsPrec

instance Evaluatable File where
eval _ _ File = currentModule >>= string . T.pack . modulePath
eval _ _ File = currentModule >>= string . T.pack . Path.toString . modulePath


-- A line directive like the Ruby constant `__LINE__`.

@ -16,6 +16,7 @@ import qualified Assigning.Assignment as Assignment
import Control.Monad
import qualified Data.Abstract.ScopeGraph as ScopeGraph (AccessControl (..))
import Data.ImportPath ()
import Data.ImportPath (defaultAlias, importPath)
import Data.List.NonEmpty (NonEmpty (..), some1)
import Data.Sum
import Data.Syntax
@ -28,13 +29,12 @@ import qualified Data.Syntax.Literal as Literal
import qualified Data.Syntax.Statement as Statement
import qualified Data.Syntax.Type as Type
import qualified Data.Term as Term
import Language.Go.Grammar as Grammar
import Language.Go.Syntax as Go.Syntax hiding (labelName, runeLiteral)
import Language.Go.Term as Go
import Language.Go.Type as Go.Type
import Data.ImportPath (importPath, defaultAlias)
import Language.Go.Grammar as Grammar

type Assignment = Assignment.Assignment [] Grammar
type Assignment = Assignment.Assignment Grammar

-- | Assignment from AST in Go's grammar onto a program in Go's syntax.
assignment :: Assignment (Term Loc)

@ -11,9 +11,9 @@ import Data.Abstract.BaseError
import Data.Abstract.Evaluatable
import Data.Abstract.Module
import qualified Data.Abstract.Package as Package
import Data.Abstract.Path
import qualified Data.Abstract.ScopeGraph as ScopeGraph
import Data.Foldable
import Data.Abstract.Path
import Data.Functor.Classes
import Data.Functor.Classes.Generic
import Data.Hashable.Lifted
@ -27,8 +27,10 @@ import Data.Text (Text)
import qualified Data.Text as T
import Diffing.Algorithm
import GHC.Generics (Generic1)
import qualified System.Path as Path
import System.FilePath.Posix


resolveGoImport :: ( Has (Modules address value) sig m
, Has (Reader ModuleInfo) sig m
, Has (Reader Package.PackageInfo) sig m
@ -41,7 +43,7 @@ resolveGoImport :: ( Has (Modules address value) sig m
resolveGoImport (ImportPath path Data.ImportPath.Unknown) = throwResolutionError $ GoImportError path
resolveGoImport (ImportPath path Relative) = do
ModuleInfo{..} <- currentModule
paths <- listModulesInDir (joinPaths (takeDirectory modulePath) path)
paths <- listModulesInDir $ (joinPaths (takeDirectory . Path.toString $ modulePath) path)
case paths of
[] -> throwResolutionError $ GoImportError path
_ -> pure paths

@ -1,147 +0,0 @@
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Language.Markdown.Assignment
( assignment
, Markdown.Syntax
, Grammar
, Markdown.Term(..)
) where

import Assigning.Assignment hiding (Assignment, Error)
import qualified Assigning.Assignment as Assignment
import qualified CMarkGFM
import Control.Monad
import Data.Sum
import Data.Syntax (makeTerm)
import qualified Data.Syntax as Syntax
import qualified Data.Term as Term
import qualified Data.Text as Text
import qualified Language.Markdown.Syntax as Markup
import Language.Markdown.Term as Markdown
import Parsing.CMark as Grammar (Grammar (..))

type Assignment = Assignment.Assignment (Term.TermF [] CMarkGFM.NodeType) Grammar

assignment :: Assignment (Term Loc)
assignment = Syntax.handleError $ makeTerm <$> symbol Document <*> children (Markup.Document <$> many blockElement)


-- Block elements

blockElement :: Assignment (Term Loc)
blockElement = choice
[ paragraph
, list
, blockQuote
, codeBlock
, thematicBreak
, htmlBlock
, heading
, table
]

paragraph :: Assignment (Term Loc)
paragraph = makeTerm <$> symbol Paragraph <*> children (Markup.Paragraph <$> many inlineElement)

list :: Assignment (Term Loc)
list = Term.termIn <$> symbol List <*> (makeList . Term.termFAnnotation . Term.termFOut <$> currentNode <*> children (many item))
where
makeList (CMarkGFM.LIST CMarkGFM.ListAttributes{..}) = case listType of
CMarkGFM.BULLET_LIST -> inject . Markup.UnorderedList
CMarkGFM.ORDERED_LIST -> inject . Markup.OrderedList
makeList _ = inject . Markup.UnorderedList

item :: Assignment (Term Loc)
item = makeTerm <$> symbol Item <*> children (many blockElement)

heading :: Assignment (Term Loc)
heading = makeTerm <$> symbol Heading <*> (makeHeading . Term.termFAnnotation . Term.termFOut <$> currentNode <*> children (many inlineElement) <*> manyTill blockElement (void (symbol Heading) <|> eof))
where
makeHeading (CMarkGFM.HEADING level) = Markup.Heading level
makeHeading _ = Markup.Heading 0

blockQuote :: Assignment (Term Loc)
blockQuote = makeTerm <$> symbol BlockQuote <*> children (Markup.BlockQuote <$> many blockElement)

codeBlock :: Assignment (Term Loc)
codeBlock = makeTerm <$> symbol CodeBlock <*> (makeCode . Term.termFAnnotation . Term.termFOut <$> currentNode <*> source)
where
makeCode (CMarkGFM.CODE_BLOCK language _) = Markup.Code (nullText language)
makeCode _ = Markup.Code Nothing

thematicBreak :: Assignment (Term Loc)
thematicBreak = makeTerm <$> token ThematicBreak <*> pure Markup.ThematicBreak

htmlBlock :: Assignment (Term Loc)
htmlBlock = makeTerm <$> symbol HTMLBlock <*> (Markup.HTMLBlock <$> source)

table :: Assignment (Term Loc)
table = makeTerm <$> symbol Table <*> children (Markup.Table <$> many tableRow)

tableRow :: Assignment (Term Loc)
tableRow = makeTerm <$> symbol TableRow <*> children (Markup.TableRow <$> many tableCell)

tableCell :: Assignment (Term Loc)
tableCell = makeTerm <$> symbol TableCell <*> children (Markup.TableCell <$> many inlineElement)


-- Inline elements

inlineElement :: Assignment (Term Loc)
inlineElement = choice
[ strong
, emphasis
, strikethrough
, text
, link
, htmlInline
, image
, code
, lineBreak
, softBreak
]

strong :: Assignment (Term Loc)
strong = makeTerm <$> symbol Strong <*> children (Markup.Strong <$> many inlineElement)

emphasis :: Assignment (Term Loc)
emphasis = makeTerm <$> symbol Emphasis <*> children (Markup.Emphasis <$> many inlineElement)

strikethrough :: Assignment (Term Loc)
strikethrough = makeTerm <$> symbol Strikethrough <*> children (Markup.Strikethrough <$> many inlineElement)

text :: Assignment (Term Loc)
text = makeTerm <$> symbol Text <*> (Markup.Text <$> source)

htmlInline :: Assignment (Term Loc)
htmlInline = makeTerm <$> symbol HTMLInline <*> (Markup.HTMLBlock <$> source)

link :: Assignment (Term Loc)
link = makeTerm <$> symbol Link <*> (makeLink . Term.termFAnnotation . Term.termFOut <$> currentNode) <* advance
where
makeLink (CMarkGFM.LINK url title) = Markup.Link url (nullText title)
makeLink _ = Markup.Link mempty Nothing

image :: Assignment (Term Loc)
image = makeTerm <$> symbol Image <*> (makeImage . Term.termFAnnotation . Term.termFOut <$> currentNode) <* advance
where
makeImage (CMarkGFM.IMAGE url title) = Markup.Image url (nullText title)
makeImage _ = Markup.Image mempty Nothing

code :: Assignment (Term Loc)
code = makeTerm <$> symbol Code <*> (Markup.Code Nothing <$> source)

lineBreak :: Assignment (Term Loc)
lineBreak = makeTerm <$> token LineBreak <*> pure Markup.LineBreak

softBreak :: Assignment (Term Loc)
softBreak = makeTerm <$> token SoftBreak <*> pure Markup.LineBreak


-- Implementation details

nullText :: Text.Text -> Maybe Text.Text
nullText text = if Text.null text then Nothing else Just text
@ -1,153 +0,0 @@
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE DuplicateRecordFields #-}
module Language.Markdown.Syntax (module Language.Markdown.Syntax) where

import Data.Abstract.Declarations
import Data.Functor.Classes
import Data.Functor.Classes.Generic
import Data.Hashable.Lifted
import Data.JSON.Fields
import qualified Data.Text as T
import Diffing.Algorithm
import GHC.Generics (Generic1)

newtype Document a = Document { values :: [a] }
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 Document where liftEq = genericLiftEq
instance Ord1 Document where liftCompare = genericLiftCompare
instance Show1 Document where liftShowsPrec = genericLiftShowsPrec


-- Block elements

newtype Paragraph a = Paragraph { values :: [a] }
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 Paragraph where liftEq = genericLiftEq
instance Ord1 Paragraph where liftCompare = genericLiftCompare
instance Show1 Paragraph where liftShowsPrec = genericLiftShowsPrec

data Heading a = Heading { headingLevel :: Int, headingContent :: [a], sectionContent :: [a] }
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 Heading where liftEq = genericLiftEq
instance Ord1 Heading where liftCompare = genericLiftCompare
instance Show1 Heading where liftShowsPrec = genericLiftShowsPrec

newtype UnorderedList a = UnorderedList { values :: [a] }
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 UnorderedList where liftEq = genericLiftEq
instance Ord1 UnorderedList where liftCompare = genericLiftCompare
instance Show1 UnorderedList where liftShowsPrec = genericLiftShowsPrec

newtype OrderedList a = OrderedList { values :: [a] }
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 OrderedList where liftEq = genericLiftEq
instance Ord1 OrderedList where liftCompare = genericLiftCompare
instance Show1 OrderedList where liftShowsPrec = genericLiftShowsPrec

newtype BlockQuote a = BlockQuote { values :: [a] }
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 BlockQuote where liftEq = genericLiftEq
instance Ord1 BlockQuote where liftCompare = genericLiftCompare
instance Show1 BlockQuote where liftShowsPrec = genericLiftShowsPrec

data ThematicBreak a = ThematicBreak
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 ThematicBreak where liftEq = genericLiftEq
instance Ord1 ThematicBreak where liftCompare = genericLiftCompare
instance Show1 ThematicBreak where liftShowsPrec = genericLiftShowsPrec

newtype HTMLBlock a = HTMLBlock { value :: T.Text }
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 HTMLBlock where liftEq = genericLiftEq
instance Ord1 HTMLBlock where liftCompare = genericLiftCompare
instance Show1 HTMLBlock where liftShowsPrec = genericLiftShowsPrec

newtype Table a = Table { values :: [a] }
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 Table where liftEq = genericLiftEq
instance Ord1 Table where liftCompare = genericLiftCompare
instance Show1 Table where liftShowsPrec = genericLiftShowsPrec

newtype TableRow a = TableRow { values :: [a] }
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 TableRow where liftEq = genericLiftEq
instance Ord1 TableRow where liftCompare = genericLiftCompare
instance Show1 TableRow where liftShowsPrec = genericLiftShowsPrec

newtype TableCell a = TableCell { values :: [a] }
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 TableCell where liftEq = genericLiftEq
instance Ord1 TableCell where liftCompare = genericLiftCompare
instance Show1 TableCell where liftShowsPrec = genericLiftShowsPrec


-- Inline elements

newtype Strong a = Strong { values :: [a] }
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 Strong where liftEq = genericLiftEq
instance Ord1 Strong where liftCompare = genericLiftCompare
instance Show1 Strong where liftShowsPrec = genericLiftShowsPrec

newtype Emphasis a = Emphasis { values :: [a] }
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 Emphasis where liftEq = genericLiftEq
instance Ord1 Emphasis where liftCompare = genericLiftCompare
instance Show1 Emphasis where liftShowsPrec = genericLiftShowsPrec

newtype Text a = Text { value :: T.Text}
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 Text where liftEq = genericLiftEq
instance Ord1 Text where liftCompare = genericLiftCompare
instance Show1 Text where liftShowsPrec = genericLiftShowsPrec

data Link a = Link { linkURL :: T.Text, linkTitle :: Maybe T.Text }
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 Link where liftEq = genericLiftEq
instance Ord1 Link where liftCompare = genericLiftCompare
instance Show1 Link where liftShowsPrec = genericLiftShowsPrec

data Image a = Image { imageURL :: T.Text, imageTitle :: Maybe T.Text }
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 Image where liftEq = genericLiftEq
instance Ord1 Image where liftCompare = genericLiftCompare
instance Show1 Image where liftShowsPrec = genericLiftShowsPrec

data Code a = Code { codeLanguage :: Maybe T.Text, codeContent :: T.Text }
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 Code where liftEq = genericLiftEq
instance Ord1 Code where liftCompare = genericLiftCompare
instance Show1 Code where liftShowsPrec = genericLiftShowsPrec

data LineBreak a = LineBreak
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 LineBreak where liftEq = genericLiftEq
instance Ord1 LineBreak where liftCompare = genericLiftCompare
instance Show1 LineBreak where liftShowsPrec = genericLiftShowsPrec

newtype Strikethrough a = Strikethrough { values :: [a] }
deriving (Declarations1, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, ToJSONFields1)

instance Eq1 Strikethrough where liftEq = genericLiftEq
instance Ord1 Strikethrough where liftCompare = genericLiftCompare
instance Show1 Strikethrough where liftShowsPrec = genericLiftShowsPrec
Some files were not shown because too many files have changed in this diff.