Renamed "semantic analyzer" to "binder".

Moved import detection from post-parse walker into parser.
This commit is contained in:
Eric Traut 2019-09-12 18:02:12 -07:00
parent 8bd000d65d
commit a5db38d69e
13 changed files with 127 additions and 134 deletions

View File

@ -33,7 +33,7 @@ The [parser](https://github.com/Microsoft/pyright/blob/master/server/src/parser/
The [postParseWalker](https://github.com/Microsoft/pyright/blob/master/server/src/analyzer/postParseWalker.ts) adds parent links to parse tree nodes and builds name bindings for names that appear within modules, classes and functions. It also detects and reports inconsistent name bindings (e.g. a variable that uses both a global and nonlocal binding in the same scope). It is also responsible for creating a list of all imports, allowing the program object to resolve these imports (using the importResolver) and add the imported source files to the program.
The [semanticAnalyzer](https://github.com/Microsoft/pyright/blob/master/server/src/analyzer/semanticAnalyzer.ts) is responsible for performing basic semantic analysis. It does not perform any type checking, but it detects and reports other semantic errors that will result in unintended runtime exceptions. It also constructs information needed by the next phase of analysis.
The [binder](https://github.com/Microsoft/pyright/blob/master/server/src/analyzer/binder.ts) is responsible for building scopes and associated symbol tables and populating those symbol tables. It does not perform any type checking, but it detects and reports other semantic errors that will result in unintended runtime exceptions.
The [typeAnalyzer](https://github.com/Microsoft/pyright/blob/master/server/src/analyzer/typeAnalyzer.ts) is responsible for interpreting type annotations, performing type inference, and reporting type inconsistencies. Unlike all previous passes, the typeAnalyzer pass runs multiple times — at least twice per file. This is necessary because type annotations can contain forward references within a file and because Python supports circular import dependencies across files. The typeAnalyzer therefore runs repeatedly until all type information “converges” and no new information is discovered.

View File

@ -34,7 +34,7 @@ export class AnalyzerNodeInfo {
_importInfo?: ImportResult;
//---------------------------------------------------------------
// Set by SemanticAnalyzer
// Set by Binder
// Scope for nodes that introduce scopes: modules, functions,
// classes, and lambdas. A scope is used to store symbol names

View File

@ -1,20 +1,19 @@
/*
* semanticAnalyzer.ts
* binder.ts
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT license.
* Author: Eric Traut
*
* A parse tree walker that performs general semantic analysis. It does
* this at the scope level. A scope in Python is defined by a module,
* class, function or lambda.
* The analyzer walks the parse tree by scopes starting at the module
* A parse tree walker that performs basic name binding (creation of
* scopes and associated symbol tables).
* The binder walks the parse tree by scopes starting at the module
* level. When a new scope is detected, it is pushed onto a list and
* analyzed separately at a later time. (The exception is a class scope,
* which is immediately analyzed.) Walking the tree in this manner
* walked separately at a later time. (The exception is a class scope,
* which is immediately walked.) Walking the tree in this manner
* simulates the order in which execution normally occurs in a Python
* file. The analyzer attempts to statically detect runtime errors that
* file. The binder attempts to statically detect runtime errors that
* would be reported by the python interpreter when executing the code.
* This analyzer doesn't perform any static type checking.
* This binder doesn't perform any static type checking.
*/
import * as assert from 'assert';
@ -46,12 +45,12 @@ import { AnyType, ClassType, ClassTypeFlags, FunctionParameter, FunctionType,
type ScopedNode = ModuleNode | ClassNode | FunctionNode | LambdaNode;
export abstract class SemanticAnalyzer extends ParseTreeWalker {
export abstract class Binder extends ParseTreeWalker {
protected readonly _scopedNode: ScopedNode;
protected readonly _fileInfo: AnalyzerFileInfo;
// A queue of scoped nodes that need to be analyzed.
protected _subscopesToAnalyze: SemanticAnalyzer[] = [];
protected _subscopesToAnalyze: Binder[] = [];
// The current scope in effect. This is either the base scope or a
// "temporary scope", used for analyzing conditional code blocks. Their
@ -125,8 +124,8 @@ export abstract class SemanticAnalyzer extends ParseTreeWalker {
// We separate analysis into two passes. The first happens immediately when
// the scope analyzer is created. The second happens after its parent scope
// has been fully analyzed.
abstract analyzeImmediate(): void;
abstract analyzeDeferred(): void;
abstract bindImmediate(): void;
abstract bindDeferred(): void;
visitModule(node: ModuleNode): boolean {
// Tree walking should start with the children of
@ -223,10 +222,10 @@ export abstract class SemanticAnalyzer extends ParseTreeWalker {
AnalyzerNodeInfo.setExpressionType(node, classType);
const analyzer = new ClassScopeAnalyzer(node, this._currentScope, classType, this._fileInfo);
this._queueSubScopeAnalyzer(analyzer);
const binder = new ClassScopeBinder(node, this._currentScope, classType, this._fileInfo);
this._queueSubScopeAnalyzer(binder);
// Add the class symbol. We do this in the semantic analyzer to speed
// Add the class symbol. We do this in the binder to speed
// up overall analysis times. Without this, the type analyzer needs
// to do more passes to resolve classes.
this._addSymbolToPermanentScope(node.name.nameToken.value, classType,
@ -300,8 +299,8 @@ export abstract class SemanticAnalyzer extends ParseTreeWalker {
const functionOrModuleScope = AnalyzerNodeInfo.getScope(functionOrModuleNode!);
assert(functionOrModuleScope !== undefined);
const analyzer = new FunctionScopeAnalyzer(node, functionOrModuleScope!, this._fileInfo);
this._queueSubScopeAnalyzer(analyzer);
const binder = new FunctionScopeBinder(node, functionOrModuleScope!, this._fileInfo);
this._queueSubScopeAnalyzer(binder);
return false;
}
@ -314,8 +313,8 @@ export abstract class SemanticAnalyzer extends ParseTreeWalker {
}
});
const analyzer = new LambdaScopeAnalyzer(node, this._currentScope, this._fileInfo);
this._queueSubScopeAnalyzer(analyzer);
const binder = new LambdaScopeBinder(node, this._currentScope, this._fileInfo);
this._queueSubScopeAnalyzer(binder);
return false;
}
@ -475,7 +474,7 @@ export abstract class SemanticAnalyzer extends ParseTreeWalker {
// Analyzes the subscopes that are discovered during the first analysis pass.
protected _analyzeSubscopesDeferred() {
for (const subscope of this._subscopesToAnalyze) {
subscope.analyzeDeferred();
subscope.bindDeferred();
}
this._subscopesToAnalyze = [];
@ -569,9 +568,9 @@ export abstract class SemanticAnalyzer extends ParseTreeWalker {
return false;
}
private _queueSubScopeAnalyzer(analyzer: SemanticAnalyzer) {
analyzer.analyzeImmediate();
this._subscopesToAnalyze.push(analyzer);
private _queueSubScopeAnalyzer(binder: Binder) {
binder.bindImmediate();
this._subscopesToAnalyze.push(binder);
}
private _addDiagnostic(diagLevel: DiagnosticLevel, rule: string, message: string, textRange: TextRange) {
@ -596,18 +595,18 @@ export abstract class SemanticAnalyzer extends ParseTreeWalker {
}
}
export class ModuleScopeAnalyzer extends SemanticAnalyzer {
export class ModuleScopeBinder extends Binder {
constructor(node: ModuleNode, fileInfo: AnalyzerFileInfo) {
super(node, fileInfo.builtinsScope ? ScopeType.Module : ScopeType.BuiltIn,
fileInfo.builtinsScope, fileInfo);
}
analyze() {
this.analyzeImmediate();
this.analyzeDeferred();
bind() {
this.bindImmediate();
this.bindDeferred();
}
analyzeImmediate() {
bindImmediate() {
this._bindImplicitNames();
const nameBindings = AnalyzerNodeInfo.getNameBindings(this._scopedNode);
assert(nameBindings !== undefined);
@ -622,7 +621,7 @@ export class ModuleScopeAnalyzer extends SemanticAnalyzer {
AnalyzerNodeInfo.setExpressionType(this._scopedNode, moduleType);
}
analyzeDeferred() {
bindDeferred() {
// Analyze any sub-scopes that were discovered during the earlier pass.
this._analyzeSubscopesDeferred();
}
@ -641,7 +640,7 @@ export class ModuleScopeAnalyzer extends SemanticAnalyzer {
}
}
export class ClassScopeAnalyzer extends SemanticAnalyzer {
export class ClassScopeBinder extends Binder {
private _classType: ClassType;
constructor(node: ClassNode, parentScope: Scope, classType: ClassType, fileInfo: AnalyzerFileInfo) {
@ -649,7 +648,7 @@ export class ClassScopeAnalyzer extends SemanticAnalyzer {
this._classType = classType;
}
analyzeImmediate() {
bindImmediate() {
this._bindImplicitNames();
const nameBindings = AnalyzerNodeInfo.getNameBindings(this._scopedNode);
assert(nameBindings !== undefined);
@ -664,7 +663,7 @@ export class ClassScopeAnalyzer extends SemanticAnalyzer {
this._classType.setClassFields(this._currentScope.getSymbolTable());
}
analyzeDeferred() {
bindDeferred() {
// Analyze any sub-scopes that were discovered during the earlier pass.
this._analyzeSubscopesDeferred();
}
@ -682,18 +681,18 @@ export class ClassScopeAnalyzer extends SemanticAnalyzer {
}
}
export class FunctionScopeAnalyzer extends SemanticAnalyzer {
export class FunctionScopeBinder extends Binder {
constructor(node: FunctionNode, parentScope: Scope, fileInfo: AnalyzerFileInfo) {
super(node, ScopeType.Function, parentScope, fileInfo);
}
analyzeImmediate() {
bindImmediate() {
this._bindImplicitNames();
// Functions don't get analyzed immediately. They are analyzed in a deferred manner.
}
analyzeDeferred() {
bindDeferred() {
const functionNode = this._scopedNode as FunctionNode;
// Add the names for this scope. They are initially unbound. We
@ -728,16 +727,16 @@ export class FunctionScopeAnalyzer extends SemanticAnalyzer {
}
}
export class LambdaScopeAnalyzer extends SemanticAnalyzer {
export class LambdaScopeBinder extends Binder {
constructor(node: LambdaNode, parentScope: Scope, fileInfo: AnalyzerFileInfo) {
super(node, ScopeType.Function, parentScope, fileInfo);
}
analyzeImmediate() {
bindImmediate() {
// Lambdas don't get analyzed immediately. They are analyzed in a deferred manner.
}
analyzeDeferred() {
bindDeferred() {
const lambdaNode = this._scopedNode as LambdaNode;
// Add the names for this scope. They are initially unbound. We

View File

@ -18,26 +18,15 @@ import { TextRangeDiagnosticSink } from '../common/diagnosticSink';
import { NameBindings, NameBindingType } from '../parser/nameBindings';
import { AssignmentNode, AugmentedAssignmentExpressionNode, ClassNode, DelNode,
ExpressionNode, ForNode, FunctionNode, GlobalNode, ImportAsNode,
ImportFromAsNode, ImportFromNode, LambdaNode, ModuleNameNode, ModuleNode,
NonlocalNode, ParseNode, ParseNodeArray, ParseNodeType, TypeAnnotationExpressionNode,
ImportFromAsNode, LambdaNode, ModuleNode, NonlocalNode, ParseNode,
ParseNodeArray, ParseNodeType, TypeAnnotationExpressionNode,
WithNode } from '../parser/parseNodes';
import { AnalyzerNodeInfo } from './analyzerNodeInfo';
import { ParseTreeWalker } from './parseTreeWalker';
export interface ModuleImport {
nameNode: ModuleNameNode;
leadingDots: number;
nameParts: string[];
// Used for "from X import Y" pattern. An empty
// array implies "from X import *".
importedSymbols: string[] | undefined;
}
export class PostParseWalker extends ParseTreeWalker {
private _parseTree: ModuleNode;
private _diagnosticSink: TextRangeDiagnosticSink;
private _importedModules: ModuleImport[] = [];
private _currentNameBindings: NameBindings;
private _currentBindingType: NameBindingType;
@ -58,10 +47,6 @@ export class PostParseWalker extends ParseTreeWalker {
this.walk(this._parseTree);
}
getImportedModules(): ModuleImport[] {
return this._importedModules;
}
visitNode(node: ParseNode) {
const children = super.visitNode(node);
@ -77,24 +62,6 @@ export class PostParseWalker extends ParseTreeWalker {
this._addName(node.module.nameParts[0].nameToken.value);
}
this._importedModules.push({
nameNode: node.module,
leadingDots: node.module.leadingDots,
nameParts: node.module.nameParts.map(p => p.nameToken.value),
importedSymbols: undefined
});
return true;
}
visitImportFrom(node: ImportFromNode): boolean {
this._importedModules.push({
nameNode: node.module,
leadingDots: node.module.leadingDots,
nameParts: node.module.nameParts.map(p => p.nameToken.value),
importedSymbols: node.imports.map(imp => imp.name.nameToken.value)
});
return true;
}

View File

@ -139,7 +139,7 @@ export class Program {
this._sourceFileList.forEach(fileInfo => {
if (fileInfo.sourceFile.isParseRequired() ||
fileInfo.sourceFile.isSemanticAnalysisRequired() ||
fileInfo.sourceFile.isBindingRequired() ||
fileInfo.sourceFile.isTypeAnalysisRequired()) {
sourceFileCount++;
}
@ -291,9 +291,9 @@ export class Program {
}
}
// Now do semantic analysis of the open files.
// Now do binding of the open files.
for (const sourceFileInfo of openFiles) {
this._doSemanticAnalysis(sourceFileInfo, options, importResolver);
this._bindFile(sourceFileInfo, options, importResolver);
if (isTimeElapsedOpenFiles()) {
return true;
@ -490,21 +490,21 @@ export class Program {
}
}
private _doSemanticAnalysis(fileToAnalyze: SourceFileInfo,
private _bindFile(fileToAnalyze: SourceFileInfo,
options: ConfigOptions, importResolver: ImportResolver) {
if (!this._isFileNeeded(fileToAnalyze) || !fileToAnalyze.sourceFile.isSemanticAnalysisRequired()) {
if (!this._isFileNeeded(fileToAnalyze) || !fileToAnalyze.sourceFile.isBindingRequired()) {
return;
}
this._parseFile(fileToAnalyze, options, importResolver);
// We need to parse and semantically analyze the builtins import first.
// We need to parse and bind the builtins import first.
let builtinsScope: Scope | undefined;
if (fileToAnalyze.builtinsImport) {
this._doSemanticAnalysis(fileToAnalyze.builtinsImport, options, importResolver);
this._bindFile(fileToAnalyze.builtinsImport, options, importResolver);
// Get the builtins scope to pass to the semantic analyzer pass.
// Get the builtins scope to pass to the binding pass.
const parseResults = fileToAnalyze.builtinsImport.sourceFile.getParseResults();
if (parseResults) {
builtinsScope = AnalyzerNodeInfo.getScope(parseResults.parseTree);
@ -512,7 +512,7 @@ export class Program {
}
}
fileToAnalyze.sourceFile.doSemanticAnalysis(options, builtinsScope);
fileToAnalyze.sourceFile.bind(options, builtinsScope);
}
private _buildImportMap(sourceFileInfo: SourceFileInfo): ImportMap {
@ -639,8 +639,8 @@ export class Program {
}
// Builds a map of files that includes fileToAnalyze and all of the files
// it imports (recursively) and ensures that all such files have been semantically
// analyzed in preparation for the type analysis phase. If any of these files have
// it imports (recursively) and ensures that all such files have completed
// binding in preparation for the type analysis phase. If any of these files have
// already been finalized (they and their recursive imports have completed the
// type analysis phase), they are not included in the results. Also builds a
// prioritized queue of files to analyze. Returns true if it ran out of time before
@ -670,8 +670,8 @@ export class Program {
return false;
}
// Make sure the file is parsed and semantically analyzed.
this._doSemanticAnalysis(fileToAnalyze, options, importResolver);
// Make sure the file is parsed and bound.
this._bindFile(fileToAnalyze, options, importResolver);
if (timeElapsedCallback()) {
return true;
}

View File

@ -20,9 +20,7 @@ import { FileDiagnostics } from '../common/diagnosticSink';
import { FileEditAction, TextEditAction } from '../common/editAction';
import { combinePaths, FileSpec, forEachAncestorDirectory, getDirectoryPath,
getFileName, getFileSpec, getFileSystemEntries, isDirectory,
isFile,
normalizePath,
stripFileExtension } from '../common/pathUtils';
isFile, normalizePath, stripFileExtension } from '../common/pathUtils';
import { Duration, timingStats } from '../common/timing';
import { HoverResults } from '../languageService/hoverProvider';
import { SignatureHelpResults } from '../languageService/signatureHelpProvider';

View File

@ -32,19 +32,19 @@ import { performQuickAction } from '../languageService/quickActions';
import { ReferencesProvider, ReferencesResult } from '../languageService/referencesProvider';
import { SignatureHelpProvider, SignatureHelpResults } from '../languageService/signatureHelpProvider';
import { ModuleNode } from '../parser/parseNodes';
import { ParseOptions, Parser, ParseResults } from '../parser/parser';
import { ModuleImport, ParseOptions, Parser, ParseResults } from '../parser/parser';
import { Token } from '../parser/tokenizerTypes';
import { TestWalker } from '../tests/testWalker';
import { AnalyzerFileInfo, ImportMap } from './analyzerFileInfo';
import { AnalyzerNodeInfo } from './analyzerNodeInfo';
import { ModuleScopeBinder } from './binder';
import { CircularDependency } from './circularDependency';
import { CommentUtils } from './commentUtils';
import { ImportResolver } from './importResolver';
import { ImportResult } from './importResult';
import { ParseTreeCleanerWalker } from './parseTreeCleaner';
import { ModuleImport, PostParseWalker } from './postParseWalker';
import { PostParseWalker } from './postParseWalker';
import { Scope } from './scope';
import { ModuleScopeAnalyzer } from './semanticAnalyzer';
import { SymbolTable } from './symbol';
import { TypeAnalyzer } from './typeAnalyzer';
import { ModuleType } from './types';
@ -53,10 +53,10 @@ const _maxImportCyclesPerFile = 4;
export const enum AnalysisPhase {
Parse = 0,
SemanticAnalysis = 1,
Bind = 1,
TypeAnalysis = 2,
FirstAnalysisPhase = SemanticAnalysis,
FirstAnalysisPhase = Bind,
LastAnalysisPhase = TypeAnalysis
}
@ -70,7 +70,7 @@ export interface AnalysisJob {
moduleType?: ModuleType;
parseDiagnostics: Diagnostic[];
semanticAnalysisDiagnostics: Diagnostic[];
bindDiagnostics: Diagnostic[];
typeAnalysisLastPassDiagnostics: Diagnostic[];
typeAnalysisFinalDiagnostics: Diagnostic[];
@ -119,11 +119,11 @@ export class SourceFile {
// of analysis.
private _analysisJob: AnalysisJob = {
fileContentsVersion: -1,
nextPhaseToRun: AnalysisPhase.SemanticAnalysis,
nextPhaseToRun: AnalysisPhase.Bind,
parseTreeNeedsCleaning: false,
parseDiagnostics: [],
semanticAnalysisDiagnostics: [],
bindDiagnostics: [],
typeAnalysisLastPassDiagnostics: [],
typeAnalysisFinalDiagnostics: [],
@ -209,7 +209,7 @@ export class SourceFile {
let diagList: Diagnostic[] = [];
diagList = diagList.concat(
this._analysisJob.parseDiagnostics,
this._analysisJob.semanticAnalysisDiagnostics,
this._analysisJob.bindDiagnostics,
this._analysisJob.typeAnalysisFinalDiagnostics);
if (options.diagnosticSettings.reportImportCycles !== 'none' && this._analysisJob.circularDependencies.length > 0) {
@ -300,12 +300,12 @@ export class SourceFile {
markDirty(): void {
this._fileContentsVersion++;
this._analysisJob.isTypeAnalysisFinalized = false;
this._analysisJob.nextPhaseToRun = AnalysisPhase.SemanticAnalysis;
this._analysisJob.nextPhaseToRun = AnalysisPhase.Bind;
}
markReanalysisRequired(): void {
// Keep the parse info, but reset the analysis to the beginning.
this._analysisJob.nextPhaseToRun = AnalysisPhase.SemanticAnalysis;
this._analysisJob.nextPhaseToRun = AnalysisPhase.Bind;
this._analysisJob.parseTreeNeedsCleaning = true;
this._analysisJob.isTypeAnalysisFinalized = false;
}
@ -338,16 +338,16 @@ export class SourceFile {
return this._analysisJob.fileContentsVersion !== this._fileContentsVersion;
}
isSemanticAnalysisRequired() {
isBindingRequired() {
if (this.isParseRequired()) {
return true;
}
return this._analysisJob.nextPhaseToRun <= AnalysisPhase.SemanticAnalysis;
return this._analysisJob.nextPhaseToRun <= AnalysisPhase.Bind;
}
isTypeAnalysisRequired() {
if (this.isSemanticAnalysisRequired()) {
if (this.isBindingRequired()) {
return true;
}
@ -455,7 +455,7 @@ export class SourceFile {
timingStats.resolveImportsTime.timeOperation(() => {
[this._analysisJob.imports, this._analysisJob.builtinsImport, this._analysisJob.typingModulePath] =
this._resolveImports(importResolver, walker.getImportedModules(), execEnvironment);
this._resolveImports(importResolver, parseResults.importedModules, execEnvironment);
});
this._analysisJob.parseDiagnostics = diagSink.diagnostics;
@ -473,8 +473,10 @@ export class SourceFile {
this._console.log(
`An internal error occurred while parsing ${ this.getFilePath() }: ` + message);
// Create dummy parse results.
this._analysisJob.parseResults = {
parseTree: ModuleNode.create({ start: 0, length: 0 }),
importedModules: [],
futureImports: new StringMap<boolean>(),
tokens: new TextRangeCollection<Token>([]),
lines: new TextRangeCollection<TextRange>([]),
@ -490,7 +492,7 @@ export class SourceFile {
}
this._analysisJob.fileContentsVersion = this._fileContentsVersion;
this._analysisJob.nextPhaseToRun = AnalysisPhase.SemanticAnalysis;
this._analysisJob.nextPhaseToRun = AnalysisPhase.Bind;
this._analysisJob.parseTreeNeedsCleaning = false;
this._analysisJob.hitMaxImportDepth = undefined;
this._diagnosticVersion++;
@ -619,25 +621,25 @@ export class SourceFile {
this._analysisJob.isTypeAnalysisFinalized = false;
}
doSemanticAnalysis(configOptions: ConfigOptions, builtinsScope?: Scope) {
bind(configOptions: ConfigOptions, builtinsScope?: Scope) {
assert(!this.isParseRequired());
assert(this.isSemanticAnalysisRequired());
assert(this.isBindingRequired());
assert(this._analysisJob.parseResults);
assert(this._analysisJob.nextPhaseToRun === AnalysisPhase.SemanticAnalysis);
assert(this._analysisJob.nextPhaseToRun === AnalysisPhase.Bind);
const fileInfo = this._buildFileInfo(configOptions, undefined, builtinsScope);
try {
this._cleanParseTreeIfRequired();
// Perform semantic analysis.
const scopeAnalyzer = new ModuleScopeAnalyzer(
this._analysisJob.parseResults!.parseTree, fileInfo);
timingStats.semanticAnalyzerTime.timeOperation(() => {
scopeAnalyzer.analyze();
// Perform name binding.
timingStats.bindTime.timeOperation(() => {
const binder = new ModuleScopeBinder(
this._analysisJob.parseResults!.parseTree, fileInfo);
binder.bind();
});
this._analysisJob.semanticAnalysisDiagnostics = fileInfo.diagnosticSink.diagnostics;
this._analysisJob.bindDiagnostics = fileInfo.diagnosticSink.diagnostics;
const moduleScope = AnalyzerNodeInfo.getScope(this._analysisJob.parseResults!.parseTree);
assert(moduleScope !== undefined);
this._analysisJob.moduleSymbolTable = moduleScope!.getSymbolTable();
@ -650,12 +652,12 @@ export class SourceFile {
(typeof e.message === 'string' ? e.message : undefined) ||
JSON.stringify(e);
this._console.log(
`An internal error occurred while performing semantic analysis for ${ this.getFilePath() }: ` + message);
`An internal error occurred while performing name binding for ${ this.getFilePath() }: ` + message);
const diagSink = new DiagnosticSink();
diagSink.addError(`An internal error occurred while performing semantic analysis`,
diagSink.addError(`An internal error occurred while performing name binding`,
getEmptyRange());
this._analysisJob.semanticAnalysisDiagnostics = diagSink.diagnostics;
this._analysisJob.bindDiagnostics = diagSink.diagnostics;
}
// Prepare for the next stage of the analysis.
@ -671,7 +673,7 @@ export class SourceFile {
doTypeAnalysis(configOptions: ConfigOptions, importMap: ImportMap) {
assert(!this.isParseRequired());
assert(!this.isSemanticAnalysisRequired());
assert(!this.isBindingRequired());
assert(this.isTypeAnalysisRequired());
assert(this._analysisJob.parseResults);
assert(this._analysisJob.nextPhaseToRun === AnalysisPhase.TypeAnalysis);

View File

@ -5,7 +5,7 @@
* Author: Eric Traut
*
* A parse tree walker that performs static type checking. It assumes
* that the semanticAnalyzer has already run and added information to
* that the binder has already run and added information to
* the parse nodes.
*/
@ -134,7 +134,7 @@ export class TypeAnalyzer extends ParseTreeWalker {
visitClass(node: ClassNode): boolean {
// We should have already resolved most of the base class
// parameters in the semantic analyzer, but if these parameters
// parameters in the binder, but if these parameters
// are variables, they may not have been resolved at that time.
const classType = AnalyzerNodeInfo.getExpressionType(node) as ClassType;
assert(classType instanceof ClassType);

View File

@ -55,7 +55,7 @@ export class TimingStats {
parseFileTime = new TimingStat();
postParseWalkerTime = new TimingStat();
resolveImportsTime = new TimingStat();
semanticAnalyzerTime = new TimingStat();
bindTime = new TimingStat();
typeAnalyzerTime = new TimingStat();
printSummary(console: ConsoleInterface) {
@ -71,7 +71,7 @@ export class TimingStats {
console.log('Parse: ' + this.parseFileTime.printTime());
console.log('Post-parse Walker: ' + this.postParseWalkerTime.printTime());
console.log('Resolve Imports: ' + this.resolveImportsTime.printTime());
console.log('Semantic Analyzer: ' + this.semanticAnalyzerTime.printTime());
console.log('Binder: ' + this.bindTime.printTime());
console.log('Type Analyzer: ' + this.typeAnalyzerTime.printTime());
}
}

View File

@ -60,6 +60,7 @@ export class ParseOptions {
export interface ParseResults {
parseTree: ModuleNode;
importedModules: ModuleImport[];
futureImports: StringMap<boolean>;
tokens: TextRangeCollection<Token>;
lines: TextRangeCollection<TextRange>;
@ -72,6 +73,16 @@ export interface ParseExpressionTextResults {
diagnostics: Diagnostic[];
}
export interface ModuleImport {
nameNode: ModuleNameNode;
leadingDots: number;
nameParts: string[];
// Used for "from X import Y" pattern. An empty
// array implies "from X import *".
importedSymbols: string[] | undefined;
}
export class Parser {
private _fileContents?: string;
private _tokenizerOutput?: TokenizerOutput;
@ -83,6 +94,7 @@ export class Parser {
private _isInFinally = false;
private _isParsingTypeAnnotation = false;
private _futureImportMap = new StringMap<boolean>();
private _importedModules: ModuleImport[] = [];
parseSourceFile(fileContents: string, parseOptions: ParseOptions,
diagSink: DiagnosticSink, cancelToken?: CancelToken): ParseResults {
@ -120,6 +132,7 @@ export class Parser {
return {
parseTree: moduleNode,
importedModules: this._importedModules,
futureImports: this._futureImportMap,
tokens: this._tokenizerOutput!.tokens,
lines: this._tokenizerOutput!.lines,
@ -999,6 +1012,13 @@ export class Parser {
}
}
this._importedModules.push({
nameNode: importFromNode.module,
leadingDots: importFromNode.module.leadingDots,
nameParts: importFromNode.module.nameParts.map(p => p.nameToken.value),
importedSymbols: importFromNode.imports.map(imp => imp.name.nameToken.value)
});
return importFromNode;
}
@ -1026,6 +1046,13 @@ export class Parser {
importNode.list.push(importAsNode);
this._importedModules.push({
nameNode: importAsNode.module,
leadingDots: importAsNode.module.leadingDots,
nameParts: importAsNode.module.nameParts.map(p => p.nameToken.value),
importedSymbols: undefined
});
if (!this._consumeTokenIfType(TokenType.Comma)) {
break;
}

View File

@ -1,10 +1,10 @@
/*
* semanticAnalyzer.test.ts
* binder.test.ts
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT license.
* Author: Eric Traut
*
* Unit tests for pyright semantic analyzer.
* Unit tests for pyright name binder.
*/
import * as assert from 'assert';
@ -12,14 +12,14 @@ import * as assert from 'assert';
import { TestUtils } from './testUtils';
test('TryExcept1', () => {
const analysisResults = TestUtils.semanticallyAnalyzeSampleFile('tryExcept1.py');
const analysisResults = TestUtils.bindSampleFile('tryExcept1.py');
assert.equal(analysisResults.errors.length, 1);
assert.equal(analysisResults.warnings.length, 0);
});
test('FString1', () => {
const analysisResults = TestUtils.semanticallyAnalyzeSampleFile('fstring1.py');
const analysisResults = TestUtils.bindSampleFile('fstring1.py');
assert.equal(analysisResults.errors.length, 2);
assert.equal(analysisResults.warnings.length, 0);

View File

@ -1,4 +1,4 @@
# This sample tests the semantic analyzer's handling of
# This sample tests the name binder's handling of
# try/except/raise statements

View File

@ -12,10 +12,10 @@ import * as fs from 'fs';
import * as path from 'path';
import { AnalyzerFileInfo } from '../analyzer/analyzerFileInfo';
import { ModuleScopeBinder } from '../analyzer/binder';
import { ImportResolver } from '../analyzer/importResolver';
import { PostParseWalker } from '../analyzer/postParseWalker';
import { Program } from '../analyzer/program';
import { ModuleScopeAnalyzer } from '../analyzer/semanticAnalyzer';
import { cloneDiagnosticSettings, ConfigOptions, ExecutionEnvironment } from '../common/configOptions';
import { StandardConsole } from '../common/console';
import { Diagnostic, DiagnosticCategory } from '../common/diagnostic';
@ -109,7 +109,7 @@ export class TestUtils {
return fileInfo;
}
static semanticallyAnalyzeSampleFile(fileName: string,
static bindSampleFile(fileName: string,
configOptions = new ConfigOptions('.')): FileAnalysisResult {
const diagSink = new DiagnosticSink();
@ -118,8 +118,8 @@ export class TestUtils {
const parseResults = this.parseSampleFile(fileName, diagSink, execEnvironment);
const fileInfo = this.buildAnalyzerFileInfo(filePath, parseResults, configOptions);
const scopeAnalyzer = new ModuleScopeAnalyzer(parseResults.parseTree, fileInfo);
scopeAnalyzer.analyze();
const binder = new ModuleScopeBinder(parseResults.parseTree, fileInfo);
binder.bind();
return {
filePath,