Upgrade to Prettier 2.0, reformat (#610)

Jake Bailey 2020-04-07 17:18:22 -07:00 committed by GitHub
parent d4f3f2f96c
commit 80355e6756
122 changed files with 1272 additions and 1245 deletions
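
Most of the churn in this diff comes from formatting defaults that changed in Prettier 2.0: single-parameter arrow functions now get parentheses (arrowParens defaults to "always" instead of "avoid") and trailing commas are emitted for multi-line ES5 constructs (trailingComma defaults to "es5" instead of "none"). The endOfLine default also moved from "auto" to "lf", which is why the .prettierrc files below now set "endOfLine": "auto" explicitly. As a rough sketch (these options are not part of this commit), the previous 1.x output could be reproduced under 2.x by pinning the old defaults:

    {
        "arrowParens": "avoid",
        "trailingComma": "none",
        "endOfLine": "auto"
    }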


@ -1,6 +1,15 @@
{
"singleQuote": true,
"tabWidth": 4,
"useTabs": false,
"printWidth": 120
"singleQuote": true,
"tabWidth": 4,
"useTabs": false,
"printWidth": 120,
"endOfLine": "auto",
"overrides": [
{
"files": ["*.yml", "*.yaml"],
"options": {
"tabWidth": 2
}
}
]
}
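
Prettier merges an overrides entry on top of the base options per file, so with the config above a .yml or .yaml file is formatted with two-space indentation while the TypeScript sources keep the four-space setting. Illustratively, the effective options for a YAML file resolve to:

    {
        "singleQuote": true,
        "tabWidth": 2,
        "useTabs": false,
        "printWidth": 120,
        "endOfLine": "auto"
    }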


@ -15,7 +15,7 @@ import {
CancellationReceiverStrategy,
CancellationSenderStrategy,
CancellationStrategy,
Disposable
Disposable,
} from 'vscode-languageserver';
function getCancellationFolderPath(folderName: string) {


@ -37,8 +37,8 @@ export function activate(context: ExtensionContext) {
module: fs.existsSync(nonBundlePath) ? nonBundlePath : bundlePath,
transport: TransportKind.ipc,
args: cancellationStrategy.getCommandLineArguments(),
options: debugOptions
}
options: debugOptions,
},
};
// Options to control the language client
@ -47,14 +47,14 @@ export function activate(context: ExtensionContext) {
documentSelector: [
{
scheme: 'file',
language: 'python'
}
language: 'python',
},
],
synchronize: {
// Synchronize the setting section to the server.
configurationSection: ['python', 'pyright']
configurationSection: ['python', 'pyright'],
},
connectionOptions: { cancellationStrategy: cancellationStrategy }
connectionOptions: { cancellationStrategy: cancellationStrategy },
};
// Create the language client and start the client.
@ -71,22 +71,22 @@ export function activate(context: ExtensionContext) {
// Register our custom commands.
const textEditorCommands = [Commands.orderImports, Commands.addMissingOptionalToParam];
textEditorCommands.forEach(commandName => {
textEditorCommands.forEach((commandName) => {
context.subscriptions.push(
commands.registerTextEditorCommand(
commandName,
(editor: TextEditor, edit: TextEditorEdit, ...args: any[]) => {
const cmd = {
command: commandName,
arguments: [editor.document.uri.toString(), ...args]
arguments: [editor.document.uri.toString(), ...args],
};
languageClient
.sendRequest('workspace/executeCommand', cmd)
.then((edits: TextEdit[] | undefined) => {
if (edits && edits.length > 0) {
editor.edit(editBuilder => {
edits.forEach(edit => {
editor.edit((editBuilder) => {
edits.forEach((edit) => {
const startPos = new Position(
edit.range.start.line,
edit.range.start.character
@ -107,7 +107,7 @@ export function activate(context: ExtensionContext) {
});
const genericCommands = [Commands.createTypeStub, Commands.restartServer];
genericCommands.forEach(command => {
genericCommands.forEach((command) => {
context.subscriptions.push(
commands.registerCommand(command, (...args: any[]) => {
languageClient.sendRequest('workspace/executeCommand', { command, arguments: args });


@ -21,16 +21,16 @@ export class ProgressReporting implements Disposable {
constructor(languageClient: LanguageClient) {
languageClient.onReady().then(() => {
languageClient.onNotification('pyright/beginProgress', async () => {
const progressPromise = new Promise<void>(resolve => {
const progressPromise = new Promise<void>((resolve) => {
this._resolveProgress = resolve;
});
window.withProgress(
{
location: ProgressLocation.Window,
title: ''
title: '',
},
progress => {
(progress) => {
this._progress = progress;
return progressPromise;
}

package-lock.json generated

@ -1313,9 +1313,9 @@
"dev": true
},
"prettier": {
"version": "1.19.1",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz",
"integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==",
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.0.4.tgz",
"integrity": "sha512-SVJIQ51spzFDvh4fIbCLvciiDMCrRhlN3mbZvv/+ycjvmF5E73bKdGfU8QDLNmjYJf+lsGnDBC4UUnvTe5OO0w==",
"dev": true
},
"progress": {


@ -31,7 +31,7 @@
"eslint": "^6.8.0",
"eslint-config-prettier": "^6.10.1",
"eslint-plugin-simple-import-sort": "^5.0.2",
"prettier": "1.19.1",
"prettier": "2.0.4",
"typescript": "^3.8.3"
},
"main": "index.js",


@ -1,6 +1,15 @@
{
"singleQuote": true,
"tabWidth": 4,
"useTabs": false,
"printWidth": 120
"singleQuote": true,
"tabWidth": 4,
"useTabs": false,
"printWidth": 120,
"endOfLine": "auto",
"overrides": [
{
"files": ["*.yml", "*.yaml"],
"options": {
"tabWidth": 2
}
}
]
}


@ -7,7 +7,7 @@
module.exports = {
roots: ['<rootDir>/src/tests'],
transform: {
'^.+\\.tsx?$': 'ts-jest'
'^.+\\.tsx?$': 'ts-jest',
},
testRegex: '(/__tests__/.*|(\\.|/)(test|spec))\\.tsx?$',
moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx'],
@ -18,8 +18,8 @@ module.exports = {
// Needed because jest calls tsc in a way that doesn't
// inline const enums.
preserveConstEnums: true
}
}
}
preserveConstEnums: true,
},
},
},
};


@ -7264,9 +7264,9 @@
"dev": true
},
"prettier": {
"version": "1.19.1",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz",
"integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==",
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.0.4.tgz",
"integrity": "sha512-SVJIQ51spzFDvh4fIbCLvciiDMCrRhlN3mbZvv/+ycjvmF5E73bKdGfU8QDLNmjYJf+lsGnDBC4UUnvTe5OO0w==",
"dev": true
},
"pretty-format": {


@ -43,7 +43,7 @@
"jest": "^25.2.4",
"jest-junit": "^10.0.0",
"node-loader": "^0.6.0",
"prettier": "1.19.1",
"prettier": "2.0.4",
"ts-jest": "^25.2.1",
"ts-loader": "^6.2.2",
"typescript": "^3.8.3",


@ -17,7 +17,7 @@ import {
LambdaNode,
ListComprehensionNode,
ModuleNode,
ParseNode
ParseNode,
} from '../parser/parseNodes';
import { AnalyzerFileInfo } from './analyzerFileInfo';
import { FlowFlags, FlowNode } from './codeFlow';


@ -67,7 +67,7 @@ import {
WhileNode,
WithNode,
YieldFromNode,
YieldNode
YieldNode,
} from '../parser/parseNodes';
import * as StringTokenUtils from '../parser/stringTokenUtils';
import { KeywordType, OperatorType } from '../parser/tokenizerTypes';
@ -86,7 +86,7 @@ import {
FlowPreFinallyGate,
FlowWildcardImport,
getUniqueFlowNodeId,
isCodeFlowSupportedForReference
isCodeFlowSupportedForReference,
} from './codeFlow';
import {
AliasDeclaration,
@ -96,7 +96,7 @@ import {
IntrinsicType,
ModuleLoaderActions,
ParameterDeclaration,
VariableDeclaration
VariableDeclaration,
} from './declaration';
import { ImplicitImport, ImportResult, ImportType } from './importResult';
import * as ParseTreeUtils from './parseTreeUtils';
@ -111,7 +111,7 @@ export const enum NameBindingType {
Nonlocal,
// With "global" keyword
Global
Global,
}
interface MemberAccessInfo {
@ -189,7 +189,7 @@ export class Binder extends ParseTreeWalker {
// Flow node that is used for unreachable code.
private static _unreachableFlowNode: FlowNode = {
flags: FlowFlags.Unreachable,
id: getUniqueFlowNodeId()
id: getUniqueFlowNodeId(),
};
constructor(fileInfo: AnalyzerFileInfo) {
@ -234,7 +234,7 @@ export class Binder extends ParseTreeWalker {
this._bindDeferred();
return {
moduleDocString: ParseTreeUtils.getDocString(node.statements)
moduleDocString: ParseTreeUtils.getDocString(node.statements),
};
}
@ -274,7 +274,7 @@ export class Binder extends ParseTreeWalker {
// Add a diagnostic action for resolving this diagnostic.
const createTypeStubAction: CreateTypeStubFileAction = {
action: Commands.createTypeStub,
moduleName: importResult.importName
moduleName: importResult.importName,
};
diagnostic.addAction(createTypeStubAction);
}
@ -292,7 +292,7 @@ export class Binder extends ParseTreeWalker {
type: DeclarationType.Class,
node,
path: this._fileInfo.filePath,
range: convertOffsetsToRange(node.name.start, TextRange.getEnd(node.name), this._fileInfo.lines)
range: convertOffsetsToRange(node.name.start, TextRange.getEnd(node.name), this._fileInfo.lines),
};
const symbol = this._bindNameToScope(this._currentScope, node.name.value);
@ -337,7 +337,7 @@ export class Binder extends ParseTreeWalker {
isMethod: !!containingClassNode,
isGenerator: false,
path: this._fileInfo.filePath,
range: convertOffsetsToRange(node.name.start, TextRange.getEnd(node.name), this._fileInfo.lines)
range: convertOffsetsToRange(node.name.start, TextRange.getEnd(node.name), this._fileInfo.lines),
};
if (symbol) {
@ -348,7 +348,7 @@ export class Binder extends ParseTreeWalker {
AnalyzerNodeInfo.setDeclaration(node, functionDeclaration);
this.walkMultiple(node.decorators);
node.parameters.forEach(param => {
node.parameters.forEach((param) => {
if (param.defaultValue) {
this.walk(param.defaultValue);
}
@ -411,7 +411,7 @@ export class Binder extends ParseTreeWalker {
// Create a start node for the function.
this._currentFlowNode = this._createStartFlowNode();
node.parameters.forEach(paramNode => {
node.parameters.forEach((paramNode) => {
if (paramNode.name) {
const symbol = this._bindNameToScope(this._currentScope, paramNode.name.value);
if (symbol) {
@ -423,7 +423,7 @@ export class Binder extends ParseTreeWalker {
paramNode.start,
TextRange.getEnd(paramNode),
this._fileInfo.lines
)
),
};
symbol.addDeclaration(paramDeclaration);
@ -464,7 +464,7 @@ export class Binder extends ParseTreeWalker {
visitLambda(node: LambdaNode): boolean {
// Analyze the parameter defaults in the context of the parent's scope
// before we add any names from the function's scope.
node.parameters.forEach(param => {
node.parameters.forEach((param) => {
if (param.defaultValue) {
this.walk(param.defaultValue);
}
@ -477,7 +477,7 @@ export class Binder extends ParseTreeWalker {
// Create a start node for the lambda.
this._currentFlowNode = this._createStartFlowNode();
node.parameters.forEach(paramNode => {
node.parameters.forEach((paramNode) => {
if (paramNode.name) {
const symbol = this._bindNameToScope(this._currentScope, paramNode.name.value);
if (symbol) {
@ -489,7 +489,7 @@ export class Binder extends ParseTreeWalker {
paramNode.start,
TextRange.getEnd(paramNode),
this._fileInfo.lines
)
),
};
symbol.addDeclaration(paramDeclaration);
@ -592,7 +592,7 @@ export class Binder extends ParseTreeWalker {
}
visitDel(node: DelNode) {
node.expressions.forEach(expr => {
node.expressions.forEach((expr) => {
this._bindPossibleTupleNamedTarget(expr);
this.walk(expr);
this._createAssignmentTargetFlowNodes(expr, false, true);
@ -807,7 +807,7 @@ export class Binder extends ParseTreeWalker {
node: node.name,
isConstant: isConstantName(node.name.value),
path: this._fileInfo.filePath,
range: convertOffsetsToRange(node.name.start, TextRange.getEnd(node.name), this._fileInfo.lines)
range: convertOffsetsToRange(node.name.start, TextRange.getEnd(node.name), this._fileInfo.lines),
};
symbol.addDeclaration(declaration);
}
@ -897,7 +897,7 @@ export class Binder extends ParseTreeWalker {
flags: FlowFlags.PreFinallyGate,
id: getUniqueFlowNodeId(),
antecedent: preFinallyReturnOrRaiseLabel,
isGateClosed: false
isGateClosed: false,
};
if (node.finallySuite) {
this._addAntecedent(preFinallyLabel, preFinallyGate);
@ -906,7 +906,7 @@ export class Binder extends ParseTreeWalker {
// An exception may be generated before the first flow node
// added by the try block, so all of the exception targets
// must have the pre-try flow node as an antecedent.
curExceptTargets.forEach(exceptLabel => {
curExceptTargets.forEach((exceptLabel) => {
this._addAntecedent(exceptLabel, this._currentFlowNode);
});
@ -958,7 +958,7 @@ export class Binder extends ParseTreeWalker {
flags: FlowFlags.PostFinally,
id: getUniqueFlowNodeId(),
antecedent: this._currentFlowNode,
preFinallyGate
preFinallyGate,
};
this._currentFlowNode = isAfterElseAndExceptsReachable ? postFinallyNode : Binder._unreachableFlowNode;
}
@ -1021,7 +1021,7 @@ export class Binder extends ParseTreeWalker {
visitGlobal(node: GlobalNode): boolean {
const globalScope = this._currentScope.getGlobalScope();
node.nameList.forEach(name => {
node.nameList.forEach((name) => {
const nameValue = name.value;
// Is the binding inconsistent?
@ -1053,7 +1053,7 @@ export class Binder extends ParseTreeWalker {
if (this._currentScope === globalScope) {
this._addError('Nonlocal declaration not allowed at module level', node);
} else {
node.nameList.forEach(name => {
node.nameList.forEach((name) => {
const nameValue = name.value;
// Is the binding inconsistent?
@ -1112,14 +1112,14 @@ export class Binder extends ParseTreeWalker {
// python module loader.
const existingDecl = symbol
.getDeclarations()
.find(decl => decl.type === DeclarationType.Alias && decl.firstNamePart === firstNamePartValue);
.find((decl) => decl.type === DeclarationType.Alias && decl.firstNamePart === firstNamePartValue);
const newDecl: AliasDeclaration = (existingDecl as AliasDeclaration) || {
type: DeclarationType.Alias,
node,
path: '',
range: getEmptyRange(),
firstNamePart: firstNamePartValue
firstNamePart: firstNamePartValue,
};
// Add the implicit imports for this module if it's the last
@ -1146,7 +1146,7 @@ export class Binder extends ParseTreeWalker {
// Allocate a new loader action.
loaderActions = {
path: '',
implicitImports: new Map<string, ModuleLoaderActions>()
implicitImports: new Map<string, ModuleLoaderActions>(),
};
if (!curLoaderActions.implicitImports) {
curLoaderActions.implicitImports = new Map<string, ModuleLoaderActions>();
@ -1195,7 +1195,7 @@ export class Binder extends ParseTreeWalker {
const lookupInfo = this._fileInfo.importLookup(resolvedPath);
if (lookupInfo) {
const wildcardNames = this._getWildcardImportNames(lookupInfo);
wildcardNames.forEach(name => {
wildcardNames.forEach((name) => {
const symbol = lookupInfo.symbolTable.get(name)!;
// Don't include the ignored names in the symbol table.
@ -1207,7 +1207,7 @@ export class Binder extends ParseTreeWalker {
node,
path: resolvedPath,
range: getEmptyRange(),
symbolName: name
symbolName: name,
};
symbol.addDeclaration(aliasDecl);
names.push(name);
@ -1218,16 +1218,16 @@ export class Binder extends ParseTreeWalker {
// Also add all of the implicitly-imported modules for
// the import module.
importInfo.implicitImports.forEach(implicitImport => {
importInfo.implicitImports.forEach((implicitImport) => {
// Don't overwrite a symbol that was imported from the module.
if (!names.some(name => name === implicitImport.name)) {
if (!names.some((name) => name === implicitImport.name)) {
const symbol = this._bindNameToScope(this._currentScope, implicitImport.name);
if (symbol) {
const submoduleFallback: AliasDeclaration = {
type: DeclarationType.Alias,
node,
path: implicitImport.path,
range: getEmptyRange()
range: getEmptyRange(),
};
const aliasDecl: AliasDeclaration = {
@ -1236,7 +1236,7 @@ export class Binder extends ParseTreeWalker {
path: resolvedPath,
symbolName: implicitImport.name,
submoduleFallback,
range: getEmptyRange()
range: getEmptyRange(),
};
symbol.addDeclaration(aliasDecl);
@ -1248,7 +1248,7 @@ export class Binder extends ParseTreeWalker {
this._createFlowWildcardImport(node, names);
}
} else {
node.imports.forEach(importSymbolNode => {
node.imports.forEach((importSymbolNode) => {
const importedName = importSymbolNode.name.value;
const nameNode = importSymbolNode.alias || importSymbolNode.name;
const symbol = this._bindNameToScope(this._currentScope, nameNode.value);
@ -1264,7 +1264,7 @@ export class Binder extends ParseTreeWalker {
// Is the import referring to an implicitly-imported module?
let implicitImport: ImplicitImport | undefined;
if (importInfo && importInfo.implicitImports) {
implicitImport = importInfo.implicitImports.find(imp => imp.name === importedName);
implicitImport = importInfo.implicitImports.find((imp) => imp.name === importedName);
}
let submoduleFallback: AliasDeclaration | undefined;
@ -1273,7 +1273,7 @@ export class Binder extends ParseTreeWalker {
type: DeclarationType.Alias,
node: importSymbolNode,
path: implicitImport.path,
range: getEmptyRange()
range: getEmptyRange(),
};
// Handle the case of "from . import X". In this case,
@ -1290,7 +1290,7 @@ export class Binder extends ParseTreeWalker {
path: resolvedPath,
symbolName: importedName,
submoduleFallback,
range: getEmptyRange()
range: getEmptyRange(),
};
symbol.addDeclaration(aliasDecl);
@ -1303,7 +1303,7 @@ export class Binder extends ParseTreeWalker {
}
visitWith(node: WithNode): boolean {
node.withItems.forEach(item => {
node.withItems.forEach((item) => {
this.walk(item.expression);
if (item.target) {
this._bindPossibleTupleNamedTarget(item.target);
@ -1476,7 +1476,7 @@ export class Binder extends ParseTreeWalker {
if (firstDecl.node.parent && firstDecl.node.parent.nodeType === ParseNodeType.Assignment) {
const expr = firstDecl.node.parent.rightExpression;
if (expr.nodeType === ParseNodeType.List) {
expr.entries.forEach(listEntryNode => {
expr.entries.forEach((listEntryNode) => {
if (
listEntryNode.nodeType === ParseNodeType.StringList &&
listEntryNode.strings.length === 1 &&
@ -1544,7 +1544,7 @@ export class Binder extends ParseTreeWalker {
private _createStartFlowNode() {
const flowNode: FlowNode = {
flags: FlowFlags.Start,
id: getUniqueFlowNodeId()
id: getUniqueFlowNodeId(),
};
return flowNode;
}
@ -1553,7 +1553,7 @@ export class Binder extends ParseTreeWalker {
const flowNode: FlowLabel = {
flags: FlowFlags.BranchLabel,
id: getUniqueFlowNodeId(),
antecedents: []
antecedents: [],
};
return flowNode;
}
@ -1562,7 +1562,7 @@ export class Binder extends ParseTreeWalker {
const flowNode: FlowLabel = {
flags: FlowFlags.LoopLabel,
id: getUniqueFlowNodeId(),
antecedents: []
antecedents: [],
};
return flowNode;
}
@ -1625,7 +1625,7 @@ export class Binder extends ParseTreeWalker {
return antecedent;
}
expressionList.forEach(expr => {
expressionList.forEach((expr) => {
const referenceKey = createKeyForReference(expr);
this._currentExecutionScopeReferenceMap.set(referenceKey, referenceKey);
});
@ -1634,7 +1634,7 @@ export class Binder extends ParseTreeWalker {
flags,
id: getUniqueFlowNodeId(),
expression,
antecedent
antecedent,
};
this._addExceptTargets(conditionalFlowNode);
@ -1759,7 +1759,7 @@ export class Binder extends ParseTreeWalker {
}
case ParseNodeType.Tuple: {
target.expressions.forEach(expr => {
target.expressions.forEach((expr) => {
this._createAssignmentTargetFlowNodes(expr, walkTargets, unbound);
});
break;
@ -1782,7 +1782,7 @@ export class Binder extends ParseTreeWalker {
}
case ParseNodeType.List: {
target.entries.forEach(entry => {
target.entries.forEach((entry) => {
this._createAssignmentTargetFlowNodes(entry, walkTargets, unbound);
});
break;
@ -1802,7 +1802,7 @@ export class Binder extends ParseTreeWalker {
flags: FlowFlags.Call,
id: getUniqueFlowNodeId(),
node,
antecedent: this._currentFlowNode
antecedent: this._currentFlowNode,
};
this._currentFlowNode = flowNode;
@ -1816,7 +1816,7 @@ export class Binder extends ParseTreeWalker {
id: getUniqueFlowNodeId(),
antecedent: this._currentFlowNode,
targetSymbolId,
aliasSymbolId
aliasSymbolId,
};
this._currentFlowNode = flowNode;
@ -1838,7 +1838,7 @@ export class Binder extends ParseTreeWalker {
id: getUniqueFlowNodeId(),
node,
antecedent: this._currentFlowNode,
targetSymbolId
targetSymbolId,
};
const referenceKey = createKeyForReference(node);
@ -1865,7 +1865,7 @@ export class Binder extends ParseTreeWalker {
id: getUniqueFlowNodeId(),
node,
names,
antecedent: this._currentFlowNode
antecedent: this._currentFlowNode,
};
this._addExceptTargets(flowNode);
@ -1883,7 +1883,7 @@ export class Binder extends ParseTreeWalker {
// If there are any except targets, then we're in a try block, and we
// have to assume that an exception can be raised after every assignment.
if (this._currentExceptTargets) {
this._currentExceptTargets.forEach(label => {
this._currentExceptTargets.forEach((label) => {
this._addAntecedent(label, flowNode);
});
}
@ -1904,7 +1904,7 @@ export class Binder extends ParseTreeWalker {
private _addAntecedent(label: FlowLabel, antecedent: FlowNode) {
if (!(this._currentFlowNode.flags & FlowFlags.Unreachable)) {
// Don't add the same antecedent twice.
if (!label.antecedents.some(existing => existing.id === antecedent.id)) {
if (!label.antecedents.some((existing) => existing.id === antecedent.id)) {
label.antecedents.push(antecedent);
}
}
@ -1950,14 +1950,14 @@ export class Binder extends ParseTreeWalker {
}
case ParseNodeType.Tuple: {
target.expressions.forEach(expr => {
target.expressions.forEach((expr) => {
this._bindPossibleTupleNamedTarget(expr, addedSymbols);
});
break;
}
case ParseNodeType.List: {
target.entries.forEach(expr => {
target.entries.forEach((expr) => {
this._bindPossibleTupleNamedTarget(expr, addedSymbols);
});
break;
@ -1987,7 +1987,7 @@ export class Binder extends ParseTreeWalker {
node,
intrinsicType: type,
path: this._fileInfo.filePath,
range: getEmptyRange()
range: getEmptyRange(),
});
symbol.setIsIgnoredForProtocolMatch();
}
@ -2055,7 +2055,7 @@ export class Binder extends ParseTreeWalker {
isConstant: isConstantName(target.value),
inferredTypeSource: source,
path: this._fileInfo.filePath,
range: convertOffsetsToRange(name.start, TextRange.getEnd(name), this._fileInfo.lines)
range: convertOffsetsToRange(name.start, TextRange.getEnd(name), this._fileInfo.lines),
};
symbolWithScope.symbol.addDeclaration(declaration);
}
@ -2092,7 +2092,7 @@ export class Binder extends ParseTreeWalker {
target.memberName.start,
target.memberName.start + target.memberName.length,
this._fileInfo.lines
)
),
};
symbol.addDeclaration(declaration);
}
@ -2100,7 +2100,7 @@ export class Binder extends ParseTreeWalker {
}
case ParseNodeType.Tuple: {
target.expressions.forEach(expr => {
target.expressions.forEach((expr) => {
this._addInferredTypeAssignmentForVariable(expr, source);
});
break;
@ -2117,7 +2117,7 @@ export class Binder extends ParseTreeWalker {
}
case ParseNodeType.List: {
target.entries.forEach(entry => {
target.entries.forEach((entry) => {
this._addInferredTypeAssignmentForVariable(entry, source);
});
break;
@ -2141,7 +2141,7 @@ export class Binder extends ParseTreeWalker {
isFinal: finalInfo.isFinal,
path: this._fileInfo.filePath,
typeAnnotationNode: finalInfo.isFinal ? finalInfo.finalTypeNode : typeAnnotation,
range: convertOffsetsToRange(name.start, TextRange.getEnd(name), this._fileInfo.lines)
range: convertOffsetsToRange(name.start, TextRange.getEnd(name), this._fileInfo.lines),
};
symbolWithScope.symbol.addDeclaration(declaration);
}
@ -2190,7 +2190,7 @@ export class Binder extends ParseTreeWalker {
target.memberName.start,
target.memberName.start + target.memberName.length,
this._fileInfo.lines
)
),
};
symbol.addDeclaration(declaration);
@ -2319,12 +2319,12 @@ export class Binder extends ParseTreeWalker {
classNode,
methodNode,
classScope,
isInstanceMember
isInstanceMember,
};
}
private _addImplicitImportsToLoaderActions(importResult: ImportResult, loaderActions: ModuleLoaderActions) {
importResult.implicitImports.forEach(implicitImport => {
importResult.implicitImports.forEach((implicitImport) => {
const existingLoaderAction = loaderActions.implicitImports
? loaderActions.implicitImports.get(implicitImport.name)
: undefined;
@ -2336,7 +2336,7 @@ export class Binder extends ParseTreeWalker {
}
loaderActions.implicitImports.set(implicitImport.name, {
path: implicitImport.path,
implicitImports: new Map<string, ModuleLoaderActions>()
implicitImports: new Map<string, ModuleLoaderActions>(),
});
}
});
@ -2375,7 +2375,7 @@ export class Binder extends ParseTreeWalker {
ClassVar: true,
Final: true,
Literal: true,
TypedDict: true
TypedDict: true,
};
const assignedName = assignedNameNode.value;
@ -2394,7 +2394,7 @@ export class Binder extends ParseTreeWalker {
annotationNode.start,
TextRange.getEnd(annotationNode),
this._fileInfo.lines
)
),
});
}
return true;
@ -2405,7 +2405,7 @@ export class Binder extends ParseTreeWalker {
scope: this._currentScope,
nonLocalBindingsMap: this._notLocalBindings,
codeFlowExpressionMap: this._currentExecutionScopeReferenceMap,
callback
callback,
});
}


@ -57,7 +57,7 @@ import {
WhileNode,
WithNode,
YieldFromNode,
YieldNode
YieldNode,
} from '../parser/parseNodes';
import { AnalyzerFileInfo } from './analyzerFileInfo';
import * as AnalyzerNodeInfo from './analyzerNodeInfo';
@ -82,7 +82,7 @@ import {
ObjectType,
Type,
TypeCategory,
UnknownType
UnknownType,
} from './types';
import {
ClassMemberLookupFlags,
@ -95,7 +95,7 @@ import {
isProperty,
lookUpClassMember,
specializeType,
transformTypeObjectToClass
transformTypeObjectToClass,
} from './typeUtils';
export class Checker extends ParseTreeWalker {
@ -189,7 +189,7 @@ export class Checker extends ParseTreeWalker {
}
}
node.parameters.forEach(param => {
node.parameters.forEach((param) => {
if (param.defaultValue) {
this.walk(param.defaultValue);
}
@ -205,7 +205,7 @@ export class Checker extends ParseTreeWalker {
this.walkMultiple(node.decorators);
node.parameters.forEach(param => {
node.parameters.forEach((param) => {
if (param.name) {
this.walk(param.name);
}
@ -229,7 +229,7 @@ export class Checker extends ParseTreeWalker {
// Walk the children.
this.walkMultiple([...node.parameters, node.expression]);
node.parameters.forEach(param => {
node.parameters.forEach((param) => {
if (param.name) {
const paramType = this._evaluator.getType(param.name);
if (paramType) {
@ -315,7 +315,7 @@ export class Checker extends ParseTreeWalker {
}
visitWith(node: WithNode): boolean {
node.withItems.forEach(item => {
node.withItems.forEach((item) => {
this._evaluator.evaluateTypesForStatement(item);
});
@ -421,7 +421,7 @@ export class Checker extends ParseTreeWalker {
if (exceptionType && baseExceptionType && baseExceptionType.category === TypeCategory.Class) {
const diagAddendum = new DiagnosticAddendum();
doForSubtypes(exceptionType, subtype => {
doForSubtypes(exceptionType, (subtype) => {
if (!isAnyOrUnknown(subtype)) {
if (subtype.category === TypeCategory.Class) {
if (!derivesFromClassRecursive(subtype, baseExceptionType)) {
@ -463,7 +463,7 @@ export class Checker extends ParseTreeWalker {
if (exceptionType && baseExceptionType && baseExceptionType.category === TypeCategory.Class) {
const diagAddendum = new DiagnosticAddendum();
doForSubtypes(exceptionType, subtype => {
doForSubtypes(exceptionType, (subtype) => {
if (!isAnyOrUnknown(subtype) && !isNoneOrNever(subtype)) {
if (subtype.category === TypeCategory.Object) {
if (!derivesFromClassRecursive(subtype.classType, baseExceptionType)) {
@ -602,7 +602,7 @@ export class Checker extends ParseTreeWalker {
}
visitFormatString(node: FormatStringNode): boolean {
node.expressions.forEach(formatExpr => {
node.expressions.forEach((formatExpr) => {
this._evaluator.getType(formatExpr);
});
@ -616,7 +616,7 @@ export class Checker extends ParseTreeWalker {
}
visitDel(node: DelNode) {
node.expressions.forEach(expr => {
node.expressions.forEach((expr) => {
this._evaluator.verifyDeleteExpression(expr);
});
@ -640,7 +640,7 @@ export class Checker extends ParseTreeWalker {
visitImportFrom(node: ImportFromNode): boolean {
if (!node.isWildcardImport) {
node.imports.forEach(importAs => {
node.imports.forEach((importAs) => {
this._evaluator.evaluateTypesForStatement(importAs);
});
}
@ -689,7 +689,7 @@ export class Checker extends ParseTreeWalker {
} else if (exceptionType.category === TypeCategory.Object) {
const iterableType = this._evaluator.getTypeFromIterable(exceptionType, false, errorNode, false);
resultingExceptionType = doForSubtypes(iterableType, subtype => {
resultingExceptionType = doForSubtypes(iterableType, (subtype) => {
if (isAnyOrUnknown(subtype)) {
return subtype;
}
@ -750,7 +750,7 @@ export class Checker extends ParseTreeWalker {
let sawFinal = false;
let sawAssignment = false;
decls.forEach(decl => {
decls.forEach((decl) => {
if (isFinalVariableDeclaration(decl)) {
if (sawFinal) {
this._evaluator.addError(`"${name}" was previously declared as Final`, decl.node);
@ -768,7 +768,7 @@ export class Checker extends ParseTreeWalker {
// If it's not a stub file, an assignment must be provided.
if (!sawAssignment && !this._fileInfo.isStubFile) {
const firstDecl = decls.find(decl => decl.type === DeclarationType.Variable && decl.isFinal);
const firstDecl = decls.find((decl) => decl.type === DeclarationType.Variable && decl.isFinal);
if (firstDecl) {
this._evaluator.addError(`"${name}" is declared Final, but value is not assigned`, firstDecl.node);
}
@ -786,12 +786,12 @@ export class Checker extends ParseTreeWalker {
return;
}
let otherDecls = symbol.getDeclarations().filter(decl => decl !== primaryDecl);
let otherDecls = symbol.getDeclarations().filter((decl) => decl !== primaryDecl);
// If it's a function, we can skip any other declarations
// that are overloads.
if (primaryDecl.type === DeclarationType.Function) {
otherDecls = otherDecls.filter(decl => decl.type !== DeclarationType.Function);
otherDecls = otherDecls.filter((decl) => decl.type !== DeclarationType.Function);
}
// If there are no other declarations to consider, we're done.
@ -916,7 +916,7 @@ export class Checker extends ParseTreeWalker {
}
const decls = symbol.getDeclarations();
decls.forEach(decl => {
decls.forEach((decl) => {
this._conditionallyReportUnusedDeclaration(decl, this._isSymbolPrivate(name, scopeType));
});
}
@ -938,7 +938,7 @@ export class Checker extends ParseTreeWalker {
// Handle multi-part names specially.
const nameParts = decl.node.module.nameParts;
if (nameParts.length > 0) {
const multipartName = nameParts.map(np => np.value).join('.');
const multipartName = nameParts.map((np) => np.value).join('.');
const textRange: TextRange = { start: nameParts[0].start, length: nameParts[0].length };
TextRange.extend(textRange, nameParts[nameParts.length - 1]);
this._fileInfo.diagnosticSink.addUnusedCodeWithTextRange(
@ -1045,7 +1045,7 @@ export class Checker extends ParseTreeWalker {
if (!arg0Type) {
return;
}
arg0Type = doForSubtypes(arg0Type, subtype => {
arg0Type = doForSubtypes(arg0Type, (subtype) => {
return transformTypeObjectToClass(subtype);
});
@ -1066,7 +1066,7 @@ export class Checker extends ParseTreeWalker {
// parameter is a tuple of classes.
const objClass = arg1Type.classType;
if (ClassType.isBuiltIn(objClass, 'Tuple') && objClass.typeArguments) {
objClass.typeArguments.forEach(typeArg => {
objClass.typeArguments.forEach((typeArg) => {
if (typeArg.category === TypeCategory.Class) {
classTypeList.push(typeArg);
} else {
@ -1080,7 +1080,7 @@ export class Checker extends ParseTreeWalker {
// According to PEP 544, protocol classes cannot be used as the right-hand
// argument to isinstance or issubclass.
if (classTypeList.some(type => ClassType.isProtocolClass(type))) {
if (classTypeList.some((type) => ClassType.isProtocolClass(type))) {
this._evaluator.addError(
`Protocol class cannot be used in ${callName} call`,
node.arguments[1].valueExpression
@ -1125,7 +1125,7 @@ export class Checker extends ParseTreeWalker {
}
// Make all class types into object types before returning them.
return filteredTypes.map(t => (t.category === TypeCategory.Class ? ObjectType.create(t) : t));
return filteredTypes.map((t) => (t.category === TypeCategory.Class ? ObjectType.create(t) : t));
};
let filteredType: Type;
@ -1139,7 +1139,7 @@ export class Checker extends ParseTreeWalker {
let remainingTypes: Type[] = [];
let foundAnyType = false;
arg0Type.subtypes.forEach(t => {
arg0Type.subtypes.forEach((t) => {
if (isAnyOrUnknown(t)) {
foundAnyType = true;
}
@ -1162,7 +1162,7 @@ export class Checker extends ParseTreeWalker {
}
const getTestType = () => {
const objTypeList = classTypeList.map(t => ObjectType.create(t));
const objTypeList = classTypeList.map((t) => ObjectType.create(t));
return combineTypes(objTypeList);
};
@ -1319,7 +1319,7 @@ export class Checker extends ParseTreeWalker {
this._evaluator.addError(`TypedDict classes can contain only type annotations`, node);
};
suiteNode.statements.forEach(statement => {
suiteNode.statements.forEach((statement) => {
if (!AnalyzerNodeInfo.isCodeUnreachable(statement)) {
if (statement.nodeType === ParseNodeType.StatementList) {
for (const substatement of statement.statements) {
@ -1674,11 +1674,11 @@ export class Checker extends ParseTreeWalker {
const importModuleMap = new Map<string, ImportAsNode>();
importStatements.orderedImports.forEach(importStatement => {
importStatements.orderedImports.forEach((importStatement) => {
if (importStatement.node.nodeType === ParseNodeType.ImportFrom) {
const symbolMap = new Map<string, ImportFromAsNode>();
importStatement.node.imports.forEach(importFromAs => {
importStatement.node.imports.forEach((importFromAs) => {
// Ignore duplicates if they're aliased.
if (!importFromAs.alias) {
const prevImport = symbolMap.get(importFromAs.name.value);


@ -20,7 +20,7 @@ import {
ImportFromNode,
MemberAccessNode,
NameNode,
ParseNodeType
ParseNodeType,
} from '../parser/parseNodes';
export enum FlowFlags {
@ -36,7 +36,7 @@ export enum FlowFlags {
Call = 1 << 10, // Call node
PreFinallyGate = 1 << 11, // Injected edge that links pre-finally label and pre-try flow
PostFinally = 1 << 12, // Injected edge that links post-finally flow with the rest of the graph
AssignmentAlias = 1 << 13 // Assigned symbol is aliased to another symbol with the same name
AssignmentAlias = 1 << 13, // Assigned symbol is aliased to another symbol with the same name
}
let _nextFlowNodeId = 1;


@ -14,7 +14,7 @@ import {
DiagnosticSettings,
getBooleanDiagnosticSettings,
getDiagLevelSettings,
getStrictDiagnosticSettings
getStrictDiagnosticSettings,
} from '../common/configOptions';
import { TextRangeCollection } from '../common/textRangeCollection';
import { Token } from '../parser/tokenizerTypes';
@ -69,14 +69,14 @@ function _applyStrictSettings(settings: DiagnosticSettings) {
function _parsePyrightComment(commentValue: string, settings: DiagnosticSettings) {
// Is this a pyright or mspython-specific comment?
const validPrefixes = ['pyright:', 'mspython:'];
const prefix = validPrefixes.find(p => commentValue.startsWith(p));
const prefix = validPrefixes.find((p) => commentValue.startsWith(p));
if (prefix) {
const operands = commentValue.substr(prefix.length).trim();
const operandList = operands.split(',').map(s => s.trim());
const operandList = operands.split(',').map((s) => s.trim());
// If it contains a "strict" operand, replace the existing
// diagnostic settings with their strict counterparts.
if (operandList.some(s => s === 'strict')) {
if (operandList.some((s) => s === 'strict')) {
_applyStrictSettings(settings);
}
@ -89,7 +89,7 @@ function _parsePyrightComment(commentValue: string, settings: DiagnosticSettings
}
function _parsePyrightOperand(operand: string, settings: DiagnosticSettings) {
const operandSplit = operand.split('=').map(s => s.trim());
const operandSplit = operand.split('=').map((s) => s.trim());
if (operandSplit.length !== 2) {
return settings;
}
@ -98,12 +98,12 @@ function _parsePyrightOperand(operand: string, settings: DiagnosticSettings) {
const boolSettings = getBooleanDiagnosticSettings();
const diagLevelSettings = getDiagLevelSettings();
if (diagLevelSettings.find(s => s === settingName)) {
if (diagLevelSettings.find((s) => s === settingName)) {
const diagLevelValue = _parseDiagLevel(operandSplit[1]);
if (diagLevelValue !== undefined) {
(settings as any)[settingName] = diagLevelValue;
}
} else if (boolSettings.find(s => s === settingName)) {
} else if (boolSettings.find((s) => s === settingName)) {
const boolValue = _parseBoolSetting(operandSplit[1]);
if (boolValue !== undefined) {
(settings as any)[settingName] = boolValue;


@ -25,7 +25,7 @@ import {
StringListNode,
TypeAnnotationNode,
YieldFromNode,
YieldNode
YieldNode,
} from '../parser/parseNodes';
export const enum DeclarationType {
@ -35,7 +35,7 @@ export const enum DeclarationType {
Function,
Class,
SpecialBuiltInClass,
Alias
Alias,
}
export type IntrinsicType = 'Any' | 'str' | 'Iterable[str]' | 'class' | 'Dict[str, Any]';


@ -41,7 +41,7 @@ const PotentialHeaders: RegexReplacement[] = [
{ exp: /^\s*=+(\s+=+)+$/, replacement: '=' },
{ exp: /^\s*-+(\s+-+)+$/, replacement: '-' },
{ exp: /^\s*~+(\s+-+)+$/, replacement: '~' },
{ exp: /^\s*\++(\s+\++)+$/, replacement: '+' }
{ exp: /^\s*\++(\s+\++)+$/, replacement: '+' },
];
// Regexes for replace all
@ -59,7 +59,7 @@ const LiteralBlockReplacements: RegexReplacement[] = [
{ exp: /(\S)\s*::$/g, replacement: '$1:' },
// http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html#interpreted-text
{ exp: /:[\w_\-+:.]+:`/g, replacement: '`' },
{ exp: /`:[\w_\-+:.]+:/g, replacement: '`' }
{ exp: /`:[\w_\-+:.]+:/g, replacement: '`' },
];
// Converter is a state machine, where the current state is a function which
@ -133,7 +133,9 @@ class DocStringConverter {
}
private _nextBlockIndent(): number {
return _countLeadingSpaces(this._lines.slice(this._lineNum + 1).find(v => !_isUndefinedOrWhitespace(v)) || '');
return _countLeadingSpaces(
this._lines.slice(this._lineNum + 1).find((v) => !_isUndefinedOrWhitespace(v)) || ''
);
}
private _currentLineIsOutsideBlock(): boolean {
@ -284,7 +286,7 @@ class DocStringConverter {
return '';
}
LiteralBlockReplacements.forEach(item => (line = line.replace(item.exp, item.replacement)));
LiteralBlockReplacements.forEach((item) => (line = line.replace(item.exp, item.replacement)));
line = line.replace(DoubleTickRegEx, '`');
return line;
@ -503,7 +505,7 @@ function _splitDocString(docstring: string): string[] {
// As done by inspect.cleandoc.
docstring = docstring.replace(TabRegEx, ' '.repeat(8));
let lines = docstring.split(CrLfRegEx).map(v => v.trimRight());
let lines = docstring.split(CrLfRegEx).map((v) => v.trimRight());
if (lines.length > 0) {
let first: string | undefined = lines[0].trimLeft();
if (first === '') {
@ -524,11 +526,11 @@ function _splitDocString(docstring: string): string[] {
function _stripLeadingWhitespace(lines: string[], trim?: number): string[] {
const amount = trim === undefined ? _largestTrim(lines) : trim;
return lines.map(line => (amount > line.length ? '' : line.substr(amount)));
return lines.map((line) => (amount > line.length ? '' : line.substr(amount)));
}
function _largestTrim(lines: string[]): number {
const nonEmptyLines = lines.filter(s => !_isUndefinedOrWhitespace(s));
const nonEmptyLines = lines.filter((s) => !_isUndefinedOrWhitespace(s));
const counts = nonEmptyLines.map(_countLeadingSpaces);
const largest = counts.length > 0 ? Math.min(...counts) : 0;
return largest;


@ -20,7 +20,7 @@ import {
isDirectory,
isFile,
stripFileExtension,
stripTrailingDirectorySeparator
stripTrailingDirectorySeparator,
} from '../common/pathUtils';
import { versionToString } from '../common/pythonVersion';
import * as StringUtils from '../common/stringUtils';
@ -243,7 +243,7 @@ export class ImportResolver {
importType: ImportType.Local,
isStubFile: false,
isPydFile: false,
implicitImports: []
implicitImports: [],
};
return this._addResultsToCache(execEnv, importName, notFoundResult, undefined);
@ -837,7 +837,7 @@ export class ImportResolver {
searchPath: rootPath,
isStubFile,
isPydFile,
implicitImports
implicitImports,
};
}
@ -853,7 +853,7 @@ export class ImportResolver {
// Copy the nameParts into a new directory and add an extra empty
// part if there is a trailing dot.
const nameParts = moduleDescriptor.nameParts.map(name => name);
const nameParts = moduleDescriptor.nameParts.map((name) => name);
if (moduleDescriptor.hasTrailingDot) {
nameParts.push('');
}
@ -881,7 +881,7 @@ export class ImportResolver {
private _addFilteredSuggestions(dirPath: string, filter: string, suggestions: string[], similarityLimit: number) {
const entries = getFileSystemEntries(this.fileSystem, dirPath);
entries.files.forEach(file => {
entries.files.forEach((file) => {
const fileWithoutExtension = stripFileExtension(file);
const fileExtension = getFileExtension(file);
@ -897,7 +897,7 @@ export class ImportResolver {
}
});
entries.directories.forEach(dir => {
entries.directories.forEach((dir) => {
if (!filter || dir.startsWith(filter)) {
this._addUniqueSuggestion(dir, suggestions);
}
@ -905,7 +905,7 @@ export class ImportResolver {
}
private _addUniqueSuggestion(suggestionToAdd: string, suggestions: string[]) {
if (suggestions.some(s => s === suggestionToAdd)) {
if (suggestions.some((s) => s === suggestionToAdd)) {
return;
}
@ -939,8 +939,8 @@ export class ImportResolver {
return importResult;
}
const filteredImplicitImports = importResult.implicitImports.filter(implicitImport => {
return importedSymbols.some(sym => sym === implicitImport.name);
const filteredImplicitImports = importResult.implicitImports.filter((implicitImport) => {
return importedSymbols.some((sym) => sym === implicitImport.name);
});
if (filteredImplicitImports.length === importResult.implicitImports.length) {
@ -963,12 +963,12 @@ export class ImportResolver {
if (fileName.endsWith('.py') || fileName.endsWith('.pyi')) {
const filePath = combinePaths(dirPath, fileName);
if (!exclusions.find(exclusion => exclusion === filePath)) {
if (!exclusions.find((exclusion) => exclusion === filePath)) {
const strippedFileName = stripFileExtension(fileName);
const implicitImport: ImplicitImport = {
isStubFile: fileName.endsWith('.pyi'),
name: strippedFileName,
path: filePath
path: filePath,
};
// Always prefer stub files over non-stub files.
@ -995,11 +995,11 @@ export class ImportResolver {
}
if (path) {
if (!exclusions.find(exclusion => exclusion === path)) {
if (!exclusions.find((exclusion) => exclusion === path)) {
const implicitImport: ImplicitImport = {
isStubFile,
name: dirName,
path
path,
};
implicitImportMap.set(implicitImport.name, implicitImport);
@ -1016,7 +1016,7 @@ export class ImportResolver {
name += '.';
}
return name + moduleDescriptor.nameParts.map(part => part).join('.');
return name + moduleDescriptor.nameParts.map((part) => part).join('.');
}
}


@ -10,7 +10,7 @@
export const enum ImportType {
BuiltIn,
ThirdParty,
Local
Local,
}
export interface ImplicitImport {


@ -19,7 +19,7 @@ import {
ImportNode,
ModuleNameNode,
ModuleNode,
ParseNodeType
ParseNodeType,
} from '../parser/parseNodes';
import { ParseResults } from '../parser/parser';
import * as AnalyzerNodeInfo from './analyzerNodeInfo';
@ -45,15 +45,15 @@ export interface ImportStatements {
export function getTopLevelImports(parseTree: ModuleNode): ImportStatements {
const localImports: ImportStatements = {
orderedImports: [],
mapByFilePath: new Map<string, ImportStatement>()
mapByFilePath: new Map<string, ImportStatement>(),
};
let followsNonImportStatement = false;
let foundFirstImportStatement = false;
parseTree.statements.forEach(statement => {
parseTree.statements.forEach((statement) => {
if (statement.nodeType === ParseNodeType.StatementList) {
statement.statements.forEach(subStatement => {
statement.statements.forEach((subStatement) => {
if (subStatement.nodeType === ParseNodeType.Import) {
foundFirstImportStatement = true;
_processImportNode(subStatement, localImports, followsNonImportStatement);
@ -88,7 +88,7 @@ export function getTextEditsForAutoImportSymbolAddition(
if (importStatement.node && importStatement.node.nodeType === ParseNodeType.ImportFrom) {
// Make sure we're not attempting to auto-import a symbol that
// already exists in the import list.
if (!importStatement.node.imports.some(importAs => importAs.name.value === symbolName)) {
if (!importStatement.node.imports.some((importAs) => importAs.name.value === symbolName)) {
for (const curImport of importStatement.node.imports) {
if (curImport.name.value > symbolName) {
break;
@ -106,7 +106,7 @@ export function getTextEditsForAutoImportSymbolAddition(
textEditList.push({
range: { start: insertionPosition, end: insertionPosition },
replacementText: priorImport ? ', ' + symbolName : symbolName + ', '
replacementText: priorImport ? ', ' + symbolName : symbolName + ', ',
});
}
}
@ -247,14 +247,14 @@ export function getTextEditsForAutoImportInsertion(
textEditList.push({
range: { start: insertionPosition, end: insertionPosition },
replacementText: newImportStatement
replacementText: newImportStatement,
});
return textEditList;
}
function _processImportNode(node: ImportNode, localImports: ImportStatements, followsNonImportStatement: boolean) {
node.list.forEach(importAsNode => {
node.list.forEach((importAsNode) => {
const importResult = AnalyzerNodeInfo.getImportInfo(importAsNode.module);
let resolvedPath: string | undefined;
@ -268,7 +268,7 @@ function _processImportNode(node: ImportNode, localImports: ImportStatements, fo
importResult,
resolvedPath,
moduleName: _formatModuleName(importAsNode.module),
followsNonImportStatement
followsNonImportStatement,
};
localImports.orderedImports.push(localImport);
@ -302,7 +302,7 @@ function _processImportFromNode(
importResult,
resolvedPath,
moduleName: _formatModuleName(node.module),
followsNonImportStatement
followsNonImportStatement,
};
localImports.orderedImports.push(localImport);
@ -329,7 +329,7 @@ function _formatModuleName(node: ModuleNameNode): string {
moduleName = moduleName + '.';
}
moduleName += node.nameParts.map(part => part.value).join('.');
moduleName += node.nameParts.map((part) => part.value).join('.');
return moduleName;
}


@ -27,7 +27,7 @@ import {
ParseNode,
ParseNodeType,
StatementNode,
SuiteNode
SuiteNode,
} from '../parser/parseNodes';
import { KeywordType, OperatorType, StringTokenFlags } from '../parser/tokenizerTypes';
import { decodeDocString } from './docStringUtils';
@ -37,7 +37,7 @@ export const enum PrintExpressionFlags {
None = 0,
// Don't use string literals for forward declarations.
ForwardDeclarations = 1 << 0
ForwardDeclarations = 1 << 0,
}
export function getNodeDepth(node: ParseNode): number {
@ -104,7 +104,7 @@ export function printExpression(node: ExpressionNode, flags = PrintExpressionFla
printExpression(node.leftExpression, flags) +
'(' +
node.arguments
.map(arg => {
.map((arg) => {
let argStr = '';
if (arg.argumentCategory === ArgumentCategory.UnpackedList) {
argStr = '*';
@ -126,7 +126,7 @@ export function printExpression(node: ExpressionNode, flags = PrintExpressionFla
return (
printExpression(node.baseExpression, flags) +
'[' +
node.items.items.map(item => printExpression(item, flags)).join(', ') +
node.items.items.map((item) => printExpression(item, flags)).join(', ') +
']'
);
}
@ -158,7 +158,7 @@ export function printExpression(node: ExpressionNode, flags = PrintExpressionFla
return printExpression(node.typeAnnotation, flags);
} else {
return node.strings
.map(str => {
.map((str) => {
return printExpression(str, flags);
})
.join(' ');
@ -237,7 +237,7 @@ export function printExpression(node: ExpressionNode, flags = PrintExpressionFla
}
case ParseNodeType.List: {
const expressions = node.entries.map(expr => {
const expressions = node.entries.map((expr) => {
return printExpression(expr, flags);
});
return `[${expressions.join(', ')}]`;
@ -248,7 +248,7 @@ export function printExpression(node: ExpressionNode, flags = PrintExpressionFla
}
case ParseNodeType.Tuple: {
const expressions = node.expressions.map(expr => {
const expressions = node.expressions.map((expr) => {
return printExpression(expr, flags);
});
if (expressions.length === 1) {
@ -288,7 +288,7 @@ export function printExpression(node: ExpressionNode, flags = PrintExpressionFla
listStr +
' ' +
node.comprehensions
.map(expr => {
.map((expr) => {
if (expr.nodeType === ParseNodeType.ListComprehensionFor) {
return (
`${expr.isAsync ? 'async ' : ''}for ` +
@ -321,7 +321,7 @@ export function printExpression(node: ExpressionNode, flags = PrintExpressionFla
return (
'lambda ' +
node.parameters
.map(param => {
.map((param) => {
let paramStr = '';
if (param.category === ParameterCategory.VarArgList) {
@ -359,7 +359,7 @@ export function printExpression(node: ExpressionNode, flags = PrintExpressionFla
}
case ParseNodeType.Dictionary: {
return `{ ${node.entries.map(entry => {
return `{ ${node.entries.map((entry) => {
if (entry.nodeType === ParseNodeType.DictionaryKeyEntry) {
return (
`${printExpression(entry.keyExpression, flags)}: ` +
@ -376,7 +376,7 @@ export function printExpression(node: ExpressionNode, flags = PrintExpressionFla
}
case ParseNodeType.Set: {
return node.entries.map(entry => printExpression(entry, flags)).join(', ');
return node.entries.map((entry) => printExpression(entry, flags)).join(', ');
}
}
@ -425,7 +425,7 @@ export function printOperator(operator: OperatorType): string {
[OperatorType.Is]: 'is',
[OperatorType.IsNot]: 'is not',
[OperatorType.In]: 'in',
[OperatorType.NotIn]: 'not in'
[OperatorType.NotIn]: 'not in',
};
if (operatorMap[operator]) {
@ -554,7 +554,7 @@ export function getEvaluationScopeNode(node: ParseNode): EvaluationScopeNode {
// the enclosing scope instead.
switch (curNode.nodeType) {
case ParseNodeType.Function: {
if (curNode.parameters.some(param => param === prevNode)) {
if (curNode.parameters.some((param) => param === prevNode)) {
if (isParamNameNode) {
return curNode;
}
@ -737,7 +737,7 @@ export function isAssignmentToDefaultsFollowingNamedTuple(callNode: ParseNode):
}
const moduleOrSuite = statementList.parent;
let statementIndex = moduleOrSuite.statements.findIndex(s => s === statementList);
let statementIndex = moduleOrSuite.statements.findIndex((s) => s === statementList);
if (statementIndex < 0) {
return false;


@ -74,7 +74,7 @@ import {
WithItemNode,
WithNode,
YieldFromNode,
YieldNode
YieldNode,
} from '../parser/parseNodes';
// To use this class, create a subclass and override the
@ -88,7 +88,7 @@ export class ParseTreeWalker {
}
walkMultiple(nodes: ParseNodeArray) {
nodes.forEach(node => {
nodes.forEach((node) => {
if (node) {
this.walk(node);
}


@ -13,7 +13,7 @@ import {
CompletionItem,
CompletionList,
DocumentSymbol,
SymbolInformation
SymbolInformation,
} from 'vscode-languageserver';
import { throwIfCancellationRequested } from '../common/cancellationUtils';
@ -30,7 +30,7 @@ import {
getRelativePath,
makeDirectories,
normalizePath,
stripFileExtension
stripFileExtension,
} from '../common/pathUtils';
import { DocumentRange, doRangesOverlap, Position, Range } from '../common/textRange';
import { Duration, timingStats } from '../common/timing';
@ -119,13 +119,13 @@ export class Program {
if (this._sourceFileList.length > 0) {
// We need to determine which files to remove from the existing file list.
const newFileMap = new Map<string, string>();
filePaths.forEach(path => {
filePaths.forEach((path) => {
newFileMap.set(path, path);
});
// Files that are not in the tracked file list are
// marked as no longer tracked.
this._sourceFileList.forEach(oldFile => {
this._sourceFileList.forEach((oldFile) => {
const filePath = oldFile.sourceFile.getFilePath();
if (!newFileMap.has(filePath)) {
oldFile.isTracked = false;
@ -155,7 +155,7 @@ export class Program {
getFilesToAnalyzeCount() {
let sourceFileCount = 0;
this._sourceFileList.forEach(fileInfo => {
this._sourceFileList.forEach((fileInfo) => {
if (
fileInfo.sourceFile.isParseRequired() ||
fileInfo.sourceFile.isBindingRequired() ||
@ -175,7 +175,7 @@ export class Program {
}
addTrackedFiles(filePaths: string[]) {
filePaths.forEach(filePath => {
filePaths.forEach((filePath) => {
this.addTrackedFile(filePath);
});
}
@ -196,7 +196,7 @@ export class Program {
isThirdPartyImport: false,
diagnosticsVersion: sourceFile.getDiagnosticVersion(),
imports: [],
importedBy: []
importedBy: [],
};
this._addToSourceFileListAndMap(sourceFileInfo);
return sourceFile;
@ -214,7 +214,7 @@ export class Program {
isThirdPartyImport: false,
diagnosticsVersion: sourceFile.getDiagnosticVersion(),
imports: [],
importedBy: []
importedBy: [],
};
this._addToSourceFileListAndMap(sourceFileInfo);
} else {
@ -251,7 +251,7 @@ export class Program {
markAllFilesDirty(evenIfContentsAreSame: boolean) {
const markDirtyMap = new Map<string, boolean>();
this._sourceFileList.forEach(sourceFileInfo => {
this._sourceFileList.forEach((sourceFileInfo) => {
if (evenIfContentsAreSame) {
sourceFileInfo.sourceFile.markDirty();
} else if (sourceFileInfo.sourceFile.didContentsChangeOnDisk()) {
@ -270,7 +270,7 @@ export class Program {
markFilesDirty(filePaths: string[]) {
const markDirtyMap = new Map<string, boolean>();
filePaths.forEach(filePath => {
filePaths.forEach((filePath) => {
const sourceFileInfo = this._sourceFileMap.get(filePath);
if (sourceFileInfo) {
sourceFileInfo.sourceFile.markDirty();
@ -309,7 +309,7 @@ export class Program {
const elapsedTime = new Duration();
const openFiles = this._sourceFileList.filter(
sf => sf.isOpenByClient && sf.sourceFile.isCheckingRequired()
(sf) => sf.isOpenByClient && sf.sourceFile.isCheckingRequired()
);
if (openFiles.length > 0) {
@ -359,14 +359,14 @@ export class Program {
// the program, skipping any typeshed files.
printDependencies(projectRootDir: string, verbose: boolean) {
const sortedFiles = this._sourceFileList
.filter(s => !s.isTypeshedFile)
.filter((s) => !s.isTypeshedFile)
.sort((a, b) => {
return a.sourceFile.getFilePath() < b.sourceFile.getFilePath() ? 1 : -1;
});
const zeroImportFiles: SourceFile[] = [];
sortedFiles.forEach(sfInfo => {
sortedFiles.forEach((sfInfo) => {
this._console.log('');
let filePath = sfInfo.sourceFile.getFilePath();
const relPath = getRelativePath(filePath, projectRootDir);
@ -380,7 +380,7 @@ export class Program {
` Imports ${sfInfo.imports.length} ` + `file${sfInfo.imports.length === 1 ? '' : 's'}`
);
if (verbose) {
sfInfo.imports.forEach(importInfo => {
sfInfo.imports.forEach((importInfo) => {
this._console.log(` ${importInfo.sourceFile.getFilePath()}`);
});
}
@ -389,7 +389,7 @@ export class Program {
` Imported by ${sfInfo.importedBy.length} ` + `file${sfInfo.importedBy.length === 1 ? '' : 's'}`
);
if (verbose) {
sfInfo.importedBy.forEach(importInfo => {
sfInfo.importedBy.forEach((importInfo) => {
this._console.log(` ${importInfo.sourceFile.getFilePath()}`);
});
}
@ -404,7 +404,7 @@ export class Program {
this._console.log(
`${zeroImportFiles.length} file${zeroImportFiles.length === 1 ? '' : 's'}` + ` not explicitly imported`
);
zeroImportFiles.forEach(importFile => {
zeroImportFiles.forEach((importFile) => {
this._console.log(` ${importFile.getFilePath()}`);
});
}
@ -533,7 +533,7 @@ export class Program {
return {
symbolTable,
docString
docString,
};
};
@ -542,7 +542,7 @@ export class Program {
private _buildModuleSymbolsMap(sourceFileToExclude?: SourceFileInfo): ModuleSymbolMap {
const moduleSymbolMap = new Map<string, SymbolTable>();
this._sourceFileList.forEach(fileInfo => {
this._sourceFileList.forEach((fileInfo) => {
if (fileInfo !== sourceFileToExclude) {
const symbolTable = fileInfo.sourceFile.getModuleSymbolTable();
if (symbolTable) {
@ -601,7 +601,7 @@ export class Program {
const closureMap = new Map<string, SourceFileInfo>();
this._getImportsRecursive(fileToCheck, closureMap, 0);
closureMap.forEach(file => {
closureMap.forEach((file) => {
timingStats.cycleDetectionTime.timeOperation(() => {
this._detectAndReportImportCycles(file);
});
@ -689,7 +689,7 @@ export class Program {
private _logImportCycle(dependencyChain: SourceFileInfo[]) {
const circDep = new CircularDependency();
dependencyChain.forEach(sourceFileInfo => {
dependencyChain.forEach((sourceFileInfo) => {
circDep.appendPath(sourceFileInfo.sourceFile.getFilePath());
});
@ -708,7 +708,7 @@ export class Program {
sourceFileInfo.sourceFile.markReanalysisRequired();
markMap.set(filePath, true);
sourceFileInfo.importedBy.forEach(dep => {
sourceFileInfo.importedBy.forEach((dep) => {
this._markFileDirtyRecursive(dep, markMap);
});
}
@ -717,7 +717,7 @@ export class Program {
getDiagnostics(options: ConfigOptions): FileDiagnostics[] {
const fileDiagnostics: FileDiagnostics[] = this._removeUnneededFiles();
this._sourceFileList.forEach(sourceFileInfo => {
this._sourceFileList.forEach((sourceFileInfo) => {
if (sourceFileInfo.isOpenByClient || (!options.checkOnlyOpenFiles && !sourceFileInfo.isThirdPartyImport)) {
const diagnostics = sourceFileInfo.sourceFile.getDiagnostics(
options,
@ -726,7 +726,7 @@ export class Program {
if (diagnostics !== undefined) {
fileDiagnostics.push({
filePath: sourceFileInfo.sourceFile.getFilePath(),
diagnostics
diagnostics,
});
// Update the cached diagnosticsVersion so we can determine
@ -750,7 +750,7 @@ export class Program {
return [];
}
return unfilteredDiagnostics.filter(diag => {
return unfilteredDiagnostics.filter((diag) => {
return doRangesOverlap(diag.range, range);
});
}
@ -977,11 +977,11 @@ export class Program {
const editActions: FileEditAction[] = [];
referencesResult.locations.forEach(loc => {
referencesResult.locations.forEach((loc) => {
editActions.push({
filePath: loc.path,
range: loc.range,
replacementText: newName
replacementText: newName,
});
});
@ -1037,7 +1037,7 @@ export class Program {
if (!this._isFileNeeded(fileInfo)) {
fileDiagnostics.push({
filePath: fileInfo.sourceFile.getFilePath(),
diagnostics: []
diagnostics: [],
});
fileInfo.sourceFile.prepareForClose();
@ -1046,8 +1046,8 @@ export class Program {
// Unlink any imports and remove them from the list if
// they are no longer referenced.
fileInfo.imports.forEach(importedFile => {
const indexToRemove = importedFile.importedBy.findIndex(fi => fi === fileInfo);
fileInfo.imports.forEach((importedFile) => {
const indexToRemove = importedFile.importedBy.findIndex((fi) => fi === fileInfo);
assert(indexToRemove >= 0);
importedFile.importedBy.splice(indexToRemove, 1);
@ -1055,11 +1055,11 @@ export class Program {
// is no longer needed. If its index is >= i, it will be
// removed when we get to it.
if (!this._isFileNeeded(importedFile)) {
const indexToRemove = this._sourceFileList.findIndex(fi => fi === importedFile);
const indexToRemove = this._sourceFileList.findIndex((fi) => fi === importedFile);
if (indexToRemove >= 0 && indexToRemove < i) {
fileDiagnostics.push({
filePath: importedFile.sourceFile.getFilePath(),
diagnostics: []
diagnostics: [],
});
importedFile.sourceFile.prepareForClose();
@ -1079,7 +1079,7 @@ export class Program {
) {
fileDiagnostics.push({
filePath: fileInfo.sourceFile.getFilePath(),
diagnostics: []
diagnostics: [],
});
fileInfo.diagnosticsVersion = fileInfo.sourceFile.getDiagnosticVersion();
@ -1188,7 +1188,7 @@ export class Program {
// Create a map of unique imports, since imports can appear more than once.
const newImportPathMap = new Map<string, UpdateImportInfo>();
imports.forEach(importResult => {
imports.forEach((importResult) => {
if (importResult.isImportFound) {
if (this._isImportAllowed(sourceFileInfo, importResult, importResult.isStubFile)) {
if (importResult.resolvedPaths.length > 0) {
@ -1198,19 +1198,19 @@ export class Program {
isTypeshedFile: !!importResult.isTypeshedFile,
isThirdPartyImport:
importResult.importType === ImportType.ThirdParty ||
(sourceFileInfo.isThirdPartyImport && importResult.importType === ImportType.Local)
(sourceFileInfo.isThirdPartyImport && importResult.importType === ImportType.Local),
});
}
}
}
importResult.implicitImports.forEach(implicitImport => {
importResult.implicitImports.forEach((implicitImport) => {
if (this._isImportAllowed(sourceFileInfo, importResult, implicitImport.isStubFile)) {
newImportPathMap.set(implicitImport.path, {
isTypeshedFile: !!importResult.isTypeshedFile,
isThirdPartyImport:
importResult.importType === ImportType.ThirdParty ||
(sourceFileInfo.isThirdPartyImport && importResult.importType === ImportType.Local)
(sourceFileInfo.isThirdPartyImport && importResult.importType === ImportType.Local),
});
}
});
@ -1221,7 +1221,7 @@ export class Program {
`in file '${sourceFileInfo.sourceFile.getFilePath()}'`
);
if (importResult.importFailureInfo) {
importResult.importFailureInfo.forEach(diag => {
importResult.importFailureInfo.forEach((diag) => {
this._console.log(` ${diag}`);
});
}
@ -1230,13 +1230,13 @@ export class Program {
});
const updatedImportMap = new Map<string, SourceFileInfo>();
sourceFileInfo.imports.forEach(importInfo => {
sourceFileInfo.imports.forEach((importInfo) => {
const oldFilePath = importInfo.sourceFile.getFilePath();
// A previous import was removed.
if (!newImportPathMap.has(oldFilePath)) {
importInfo.importedBy = importInfo.importedBy.filter(
fi => fi.sourceFile.getFilePath() !== sourceFileInfo.sourceFile.getFilePath()
(fi) => fi.sourceFile.getFilePath() !== sourceFileInfo.sourceFile.getFilePath()
);
} else {
updatedImportMap.set(oldFilePath, importInfo);
@ -1267,7 +1267,7 @@ export class Program {
isThirdPartyImport: importInfo.isThirdPartyImport,
diagnosticsVersion: sourceFile.getDiagnosticVersion(),
imports: [],
importedBy: []
importedBy: [],
};
this._addToSourceFileListAndMap(importedFileInfo);

View File

@ -19,7 +19,7 @@ import {
getDirectoryPath,
getFileSystemEntries,
isDirectory,
normalizePath
normalizePath,
} from '../common/pathUtils';
const cachedSearchPaths = new Map<string, PythonPathResult>();
@ -120,7 +120,7 @@ export function findPythonSearchPaths(
const pathResult = getPythonPathFromPythonInterpreter(fs, configOptions.pythonPath, importFailureInfo);
if (includeWatchPathsOnly && workspaceRoot) {
const paths = pathResult.paths.filter(
p => !containsPath(workspaceRoot, p, true) || containsPath(pathResult.prefix, p, true)
(p) => !containsPath(workspaceRoot, p, true) || containsPath(pathResult.prefix, p, true)
);
return paths;
@ -149,7 +149,7 @@ export function getPythonPathFromPythonInterpreter(
const result: PythonPathResult = {
paths: [],
prefix: ''
prefix: '',
};
try {
@ -163,7 +163,7 @@ export function getPythonPathFromPythonInterpreter(
const commandLineArgs: string[] = [
'-c',
'import sys, json; json.dump(dict(path=sys.path, prefix=sys.prefix), sys.stdout)'
'import sys, json; json.dump(dict(path=sys.path, prefix=sys.prefix), sys.stdout)',
];
let execOutput: string;
@ -208,7 +208,7 @@ export function getPythonPathFromPythonInterpreter(
cachedSearchPaths.set(searchKey, result);
importFailureInfo.push(`Received ${result.paths.length} paths from interpreter`);
result.paths.forEach(path => {
result.paths.forEach((path) => {
importFailureInfo.push(` ${path}`);
});
return result;

View File

@ -27,7 +27,7 @@ export const enum ScopeType {
// Built-in scopes are used for all ambient symbols provided
// by the Python environment.
Builtin
Builtin,
}
// Provides information for recursive scope lookups.
@ -121,7 +121,7 @@ export class Scope {
symbol,
isOutsideCallerModule,
isBeyondExecutionScope,
scope: this
scope: this,
};
}

View File

@ -13,7 +13,7 @@ import {
CompletionItem,
CompletionList,
DocumentSymbol,
SymbolInformation
SymbolInformation,
} from 'vscode-languageserver';
import { getGlobalCancellationToken, OperationCanceledException } from '../common/cancellationUtils';
@ -36,7 +36,7 @@ import {
getFileSystemEntries,
isDirectory,
normalizePath,
stripFileExtension
stripFileExtension,
} from '../common/pathUtils';
import { DocumentRange, Position, Range } from '../common/textRange';
import { Duration, timingStats } from '../common/timing';
@ -340,7 +340,7 @@ export class AnalyzerService {
const defaultExcludes = ['**/node_modules', '**/__pycache__', '.git'];
if (commandLineOptions.fileSpecs.length > 0) {
commandLineOptions.fileSpecs.forEach(fileSpec => {
commandLineOptions.fileSpecs.forEach((fileSpec) => {
configOptions.include.push(getFileSpec(projectRoot, fileSpec));
});
} else if (!configFilePath) {
@ -351,7 +351,7 @@ export class AnalyzerService {
configOptions.include.push(getFileSpec(commandLineOptions.executionRoot, '.'));
// Add a few common excludes to avoid long scan times.
defaultExcludes.forEach(exclude => {
defaultExcludes.forEach((exclude) => {
configOptions.exclude.push(getFileSpec(commandLineOptions.executionRoot, exclude));
});
}
@ -377,7 +377,7 @@ export class AnalyzerService {
// If there was no explicit set of excludes, add a few common ones to avoid long scan times.
if (configOptions.exclude.length === 0) {
defaultExcludes.forEach(exclude => {
defaultExcludes.forEach((exclude) => {
this._console.log(`Auto-excluding ${exclude}`);
configOptions.exclude.push(getFileSpec(configFileDir, exclude));
});
@ -477,7 +477,7 @@ export class AnalyzerService {
);
if (configOptions.verboseOutput) {
importFailureInfo.forEach(diag => {
importFailureInfo.forEach((diag) => {
this._console.log(` ${diag}`);
});
}
@ -498,7 +498,7 @@ export class AnalyzerService {
} else {
if (configOptions.verboseOutput) {
this._console.log(`Search paths found for configured python interpreter:`);
pythonPaths.forEach(path => {
pythonPaths.forEach((path) => {
this._console.log(` ${path}`);
});
}
@ -507,7 +507,7 @@ export class AnalyzerService {
if (configOptions.verboseOutput) {
if (importFailureInfo.length > 0) {
this._console.log(`When attempting to get search paths from python interpreter:`);
importFailureInfo.forEach(diag => {
importFailureInfo.forEach((diag) => {
this._console.log(` ${diag}`);
});
}
@ -515,7 +515,7 @@ export class AnalyzerService {
}
// Is there a reference to a venv? If so, there needs to be a valid venvPath.
if (configOptions.defaultVenv || configOptions.executionEnvironments.find(e => !!e.venv)) {
if (configOptions.defaultVenv || configOptions.executionEnvironments.find((e) => !!e.venv)) {
if (!configOptions.venvPath) {
this._console.log(`venvPath not specified, so venv settings will be ignored.`);
}
@ -619,7 +619,7 @@ export class AnalyzerService {
}
private _findConfigFileHereOrUp(searchPath: string): string | undefined {
return forEachAncestorDirectory(searchPath, ancestor => this._findConfigFile(ancestor));
return forEachAncestorDirectory(searchPath, (ancestor) => this._findConfigFile(ancestor));
}
private _findConfigFile(searchPath: string): string | undefined {
@ -700,7 +700,7 @@ export class AnalyzerService {
const moduleDescriptor: ImportedModuleDescriptor = {
leadingDots: 0,
nameParts: this._typeStubTargetImportName.split('.'),
importedSymbols: []
importedSymbols: [],
};
const importResult = this._importResolver.resolveImport('', execEnv, moduleDescriptor);
@ -740,7 +740,7 @@ export class AnalyzerService {
}
// Add the implicit import paths.
importResult.implicitImports.forEach(implicitImport => {
importResult.implicitImports.forEach((implicitImport) => {
filesToImport.push(implicitImport.path);
});
@ -769,7 +769,7 @@ export class AnalyzerService {
}
private _isInExcludePath(path: string, excludePaths: FileSpec[]) {
return !!excludePaths.find(excl => excl.regExp.test(path));
return !!excludePaths.find((excl) => excl.regExp.test(path));
}
private _matchFiles(include: FileSpec[], exclude: FileSpec[]): string[] {
@ -779,7 +779,7 @@ export class AnalyzerService {
const visitDirectory = (absolutePath: string, includeRegExp: RegExp) => {
if (this._configOptions.autoExcludeVenv) {
if (envMarkers.some(f => this._fs.existsSync(combinePaths(absolutePath, ...f)))) {
if (envMarkers.some((f) => this._fs.existsSync(combinePaths(absolutePath, ...f)))) {
this._console.log(`Auto-excluding ${absolutePath}`);
return;
}
@ -807,7 +807,7 @@ export class AnalyzerService {
}
};
include.forEach(includeSpec => {
include.forEach((includeSpec) => {
let foundFileSpec = false;
if (!this._isInExcludePath(includeSpec.wildcardRoot, exclude)) {
@ -855,7 +855,7 @@ export class AnalyzerService {
}
if (this._configOptions.include.length > 0) {
const fileList = this._configOptions.include.map(spec => {
const fileList = this._configOptions.include.map((spec) => {
return combinePaths(this._executionRootPath, spec.wildcardRoot);
});
@ -967,7 +967,7 @@ export class AnalyzerService {
this._removeConfigFileWatcher();
if (this._configFilePath) {
this._configFileWatcher = this._fs.createFileSystemWatcher([this._configFilePath], event => {
this._configFileWatcher = this._fs.createFileSystemWatcher([this._configFilePath], (event) => {
if (this._verboseOutput) {
this._console.log(`Received fs event '${event}' for config file`);
}
@ -1105,7 +1105,7 @@ export class AnalyzerService {
checkingOnlyOpenFiles: this._program.isCheckingOnlyOpenFiles(),
fatalErrorOccurred: false,
configParseErrorOccurred: false,
elapsedTime: duration.getDurationInSeconds()
elapsedTime: duration.getDurationInSeconds(),
};
const diagnosticFileCount = results.diagnostics.length;
@ -1135,7 +1135,7 @@ export class AnalyzerService {
checkingOnlyOpenFiles: true,
fatalErrorOccurred: true,
configParseErrorOccurred: false,
elapsedTime: 0
elapsedTime: 0,
});
}
}
@ -1153,7 +1153,7 @@ export class AnalyzerService {
checkingOnlyOpenFiles: this._program.isCheckingOnlyOpenFiles(),
fatalErrorOccurred: false,
configParseErrorOccurred: false,
elapsedTime: 0
elapsedTime: 0,
});
}
}
@ -1168,7 +1168,7 @@ export class AnalyzerService {
checkingOnlyOpenFiles: true,
fatalErrorOccurred: false,
configParseErrorOccurred: true,
elapsedTime: 0
elapsedTime: 0,
});
}
}

View File

@ -12,7 +12,7 @@ import {
CompletionItem,
CompletionList,
DocumentSymbol,
SymbolInformation
SymbolInformation,
} from 'vscode-languageserver';
import { OperationCanceledException } from '../common/cancellationUtils';
@ -213,7 +213,7 @@ export class SourceFile {
if (options.diagnosticSettings.enableTypeIgnoreComments) {
const typeIgnoreLines = this._parseResults ? this._parseResults.tokenizerOutput.typeIgnoreLines : {};
if (Object.keys(typeIgnoreLines).length > 0) {
diagList = diagList.filter(d => {
diagList = diagList.filter((d) => {
for (let line = d.range.start.line; line <= d.range.end.line; line++) {
if (typeIgnoreLines[line]) {
return false;
@ -231,14 +231,14 @@ export class SourceFile {
? DiagnosticCategory.Warning
: DiagnosticCategory.Error;
this._circularDependencies.forEach(cirDep => {
this._circularDependencies.forEach((cirDep) => {
diagList.push(
new Diagnostic(
category,
'Cycle detected in import chain\n' +
cirDep
.getPaths()
.map(path => ' ' + path)
.map((path) => ' ' + path)
.join('\n'),
getEmptyRange()
)
@ -261,7 +261,7 @@ export class SourceFile {
includeWarningsAndErrors = false;
} else if (options.diagnosticSettings.reportTypeshedErrors === 'warning') {
// Convert all the errors to warnings.
diagList = diagList.map(diag => {
diagList = diagList.map((diag) => {
if (diag.category === DiagnosticCategory.Error) {
return new Diagnostic(DiagnosticCategory.Warning, diag.message, diag.range);
}
@ -271,7 +271,7 @@ export class SourceFile {
}
// If the file is in the ignore list, clear the diagnostic list.
if (options.ignore.find(ignoreFileSpec => ignoreFileSpec.regExp.test(this._filePath))) {
if (options.ignore.find((ignoreFileSpec) => ignoreFileSpec.regExp.test(this._filePath))) {
diagList = [];
}
@ -287,7 +287,7 @@ export class SourceFile {
// the errors and warnings, leaving only the unreachable code
// diagnostics.
if (!includeWarningsAndErrors) {
diagList = diagList.filter(diag => diag.category === DiagnosticCategory.UnusedCode);
diagList = diagList.filter((diag) => diag.category === DiagnosticCategory.UnusedCode);
}
return diagList;
@ -428,7 +428,7 @@ export class SourceFile {
// Some topologies can result in a massive number of cycles. We'll cut it off.
if (this._circularDependencies.length < _maxImportCyclesPerFile) {
if (!this._circularDependencies.some(dep => dep.isEqual(circDependency))) {
if (!this._circularDependencies.some((dep) => dep.isEqual(circDependency))) {
this._circularDependencies.push(circDependency);
updatedDependencyList = true;
}
@ -497,14 +497,14 @@ export class SourceFile {
this._imports,
this._builtinsImport,
this._typingModulePath,
this._collectionsModulePath
this._collectionsModulePath,
] = this._resolveImports(importResolver, parseResults.importedModules, execEnvironment);
this._parseDiagnostics = diagSink.fetchAndClear();
});
// Is this file in a "strict" path?
const useStrict =
configOptions.strict.find(strictFileSpec => strictFileSpec.regExp.test(this._filePath)) !== undefined;
configOptions.strict.find((strictFileSpec) => strictFileSpec.regExp.test(this._filePath)) !== undefined;
this._diagnosticSettings = CommentUtils.getFileLevelDirectives(
this._parseResults.tokenizerOutput.tokens,
@ -531,9 +531,9 @@ export class SourceFile {
typeIgnoreLines: {},
predominantEndOfLineSequence: '\n',
predominantTabSequence: ' ',
predominantSingleQuoteCharacter: "'"
predominantSingleQuoteCharacter: "'",
},
containsWildcardImport: false
containsWildcardImport: false,
};
this._imports = undefined;
this._builtinsImport = undefined;
@ -886,7 +886,7 @@ export class SourceFile {
isStubFile: this._isStubFile,
isTypingStubFile: this._isTypingStubFile,
isBuiltInStubFile: this._isBuiltInStubFile,
accessedSymbolMap: new Map<number, true>()
accessedSymbolMap: new Map<number, true>(),
};
return fileInfo;
}
@ -912,7 +912,7 @@ export class SourceFile {
let builtinsImportResult: ImportResult | undefined = importResolver.resolveImport(this._filePath, execEnv, {
leadingDots: 0,
nameParts: ['builtins'],
importedSymbols: undefined
importedSymbols: undefined,
});
// Avoid importing builtins from the builtins.pyi file itself.
@ -929,7 +929,7 @@ export class SourceFile {
const typingImportResult: ImportResult | undefined = importResolver.resolveImport(this._filePath, execEnv, {
leadingDots: 0,
nameParts: ['typing'],
importedSymbols: undefined
importedSymbols: undefined,
});
// Avoid importing typing from the typing.pyi file itself.
@ -948,7 +948,7 @@ export class SourceFile {
const importResult = importResolver.resolveImport(this._filePath, execEnv, {
leadingDots: moduleImport.leadingDots,
nameParts: moduleImport.nameParts,
importedSymbols: moduleImport.importedSymbols
importedSymbols: moduleImport.importedSymbols,
});
// If the file imports the stdlib 'collections' module, stash

View File

@ -63,7 +63,7 @@ export function evaluateStaticBoolExpression(node: ExpressionNode, execEnv: Exec
node.rightExpression.nodeType === ParseNodeType.StringList
) {
// Handle the special case of "sys.platform != 'X'"
const comparisonPlatform = node.rightExpression.strings.map(s => s.value).join('');
const comparisonPlatform = node.rightExpression.strings.map((s) => s.value).join('');
const expectedPlatformName = _getExpectedPlatformNameFromPlatform(execEnv);
return _evaluateStringBinaryOperation(node.operator, expectedPlatformName || '', comparisonPlatform);
} else if (
@ -71,7 +71,7 @@ export function evaluateStaticBoolExpression(node: ExpressionNode, execEnv: Exec
node.rightExpression.nodeType === ParseNodeType.StringList
) {
// Handle the special case of "os.name == 'X'"
const comparisonOsName = node.rightExpression.strings.map(s => s.value).join('');
const comparisonOsName = node.rightExpression.strings.map((s) => s.value).join('');
const expectedOsName = _getExpectedOsNameFromPlatform(execEnv);
if (expectedOsName !== undefined) {
return _evaluateStringBinaryOperation(node.operator, expectedOsName, comparisonOsName);

View File

@ -40,7 +40,7 @@ export const enum SymbolFlags {
// Indicates that the symbol is not considered for protocol
// matching. This applies to some built-in symbols like __class__.
IgnoredForProtocolMatch = 1 << 5
IgnoredForProtocolMatch = 1 << 5,
}
let nextSymbolId = 1;
@ -129,7 +129,7 @@ export class Symbol {
// See if this node was already identified as a declaration. If so,
// replace it. Otherwise, add it as a new declaration to the end of
// the list.
const declIndex = this._declarations.findIndex(decl => areDeclarationsSame(decl, declaration));
const declIndex = this._declarations.findIndex((decl) => areDeclarationsSame(decl, declaration));
if (declIndex < 0) {
this._declarations.push(declaration);
} else {
@ -177,11 +177,11 @@ export class Symbol {
return true;
}
return this.getDeclarations().some(decl => hasTypeForDeclaration(decl));
return this.getDeclarations().some((decl) => hasTypeForDeclaration(decl));
}
getTypedDeclarations() {
return this.getDeclarations().filter(decl => hasTypeForDeclaration(decl));
return this.getDeclarations().filter((decl) => hasTypeForDeclaration(decl));
}
getSynthesizedType() {

View File

@ -37,5 +37,5 @@ export function isTypedDictMemberAccessedThroughIndex(symbol: Symbol): boolean {
}
export function isFinalVariable(symbol: Symbol): boolean {
return symbol.getDeclarations().some(decl => isFinalVariableDeclaration(decl));
return symbol.getDeclarations().some((decl) => isFinalVariableDeclaration(decl));
}

View File

@ -24,7 +24,7 @@ export class TestWalker extends ParseTreeWalker {
// Make sure that all of the children point to their parent.
private _verifyParentChildLinks(node: ParseNode, children: ParseNodeArray) {
children.forEach(child => {
children.forEach((child) => {
if (child) {
if (child.parent !== node) {
fail(
@ -42,7 +42,7 @@ export class TestWalker extends ParseTreeWalker {
private _verifyChildRanges(node: ParseNode, children: ParseNodeArray) {
let prevNode: ParseNode | undefined;
children.forEach(child => {
children.forEach((child) => {
if (child) {
let skipCheck = false;

View File

@ -58,7 +58,7 @@ export class SpeculativeTypeTracker {
// Delete all of the speculative type cache entries
// that were tracked in this context.
context!.entriesToUndo.forEach(entry => {
context!.entriesToUndo.forEach((entry) => {
entry.cache.delete(entry.id);
});
}
@ -83,7 +83,7 @@ export class SpeculativeTypeTracker {
if (stackSize > 0) {
this._speculativeContextStack[stackSize - 1].entriesToUndo.push({
cache,
id
id,
});
}
}

File diff suppressed because it is too large
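Note: the hunks throughout this commit (including the suppressed one above) appear to follow the two formatting defaults that changed in Prettier 2.0: arrowParens now defaults to "always", so single-parameter arrow functions gain parentheses, and trailingComma now defaults to "es5", so multi-line literals gain trailing commas. A minimal illustrative sketch (hypothetical sample code, not taken from this repository):

// Hypothetical sample, shown only to illustrate the changed Prettier defaults.
const items = [{ name: 'a' }, { name: 'b' }];

// Prettier 1.x defaults (arrowParens: "avoid", trailingComma: "none"):
const names = items.map(item => item.name);

// Prettier 2.0 defaults (arrowParens: "always", trailingComma: "es5"):
const namesV2 = items.map((item) => item.name);
const options = {
    verbose: true,
    depth: 2,
};

console.log(names, namesV2, options);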

View File

@ -32,7 +32,7 @@ import {
TryNode,
TypeAnnotationNode,
WhileNode,
WithNode
WithNode,
} from '../parser/parseNodes';
import * as AnalyzerNodeInfo from './analyzerNodeInfo';
import * as ParseTreeUtils from './parseTreeUtils';
@ -72,12 +72,12 @@ class TrackedImportFrom extends TrackedImport {
}
addSymbol(symbol: Symbol | undefined, name: string, alias: string | undefined, isAccessed = false) {
if (!this.symbols.find(s => s.name === name)) {
if (!this.symbols.find((s) => s.name === name)) {
this.symbols.push({
symbol,
name,
alias,
isAccessed
isAccessed,
});
}
}
@ -163,7 +163,7 @@ export class TypeStubWriter extends ParseTreeWalker {
let line = `class ${className}`;
if (node.arguments.length > 0) {
line += `(${node.arguments
.map(arg => {
.map((arg) => {
let argString = '';
if (arg.name) {
argString = arg.name.value + '=';
@ -198,7 +198,7 @@ export class TypeStubWriter extends ParseTreeWalker {
this._emitDecorators(node.decorators);
let line = node.isAsync ? 'async ' : '';
line += `def ${functionName}`;
line += `(${node.parameters.map(param => this._printParameter(param)).join(', ')})`;
line += `(${node.parameters.map((param) => this._printParameter(param)).join(', ')})`;
if (node.returnTypeAnnotation) {
line += ' -> ' + this._printExpression(node.returnTypeAnnotation, true);
@ -367,7 +367,7 @@ export class TypeStubWriter extends ParseTreeWalker {
const currentScope = getScopeForNode(node);
if (currentScope) {
// Record the input for later.
node.list.forEach(imp => {
node.list.forEach((imp) => {
const moduleName = this._printModuleName(imp.module);
if (!this._trackedImportAs.has(moduleName)) {
const symbolName = imp.alias
@ -406,7 +406,7 @@ export class TypeStubWriter extends ParseTreeWalker {
this._trackedImportFrom.set(moduleName, trackedImportFrom);
}
node.imports.forEach(imp => {
node.imports.forEach((imp) => {
const symbolName = imp.alias ? imp.alias.value : imp.name.value;
const symbolInfo = currentScope.lookUpSymbolRecursive(symbolName);
if (symbolInfo) {
@ -461,10 +461,10 @@ export class TypeStubWriter extends ParseTreeWalker {
}
private _emitDecorators(decorators: DecoratorNode[]) {
decorators.forEach(decorator => {
decorators.forEach((decorator) => {
let line = '@' + this._printExpression(decorator.leftExpression);
if (decorator.arguments) {
line += `(${decorator.arguments.map(arg => this._printArgument(arg)).join(', ')})`;
line += `(${decorator.arguments.map((arg) => this._printArgument(arg)).join(', ')})`;
}
this._emitLine(line);
});
@ -499,7 +499,7 @@ export class TypeStubWriter extends ParseTreeWalker {
for (let i = 0; i < node.leadingDots; i++) {
line += '.';
}
line += node.nameParts.map(part => part.value).join('.');
line += node.nameParts.map((part) => part.value).join('.');
return line;
}
@ -510,7 +510,7 @@ export class TypeStubWriter extends ParseTreeWalker {
this._trackedImportFrom.set(importName, trackedImportFrom);
}
symbols.forEach(symbol => {
symbols.forEach((symbol) => {
trackedImportFrom!.addSymbol(undefined, symbol, undefined, true);
});
}
@ -593,7 +593,7 @@ export class TypeStubWriter extends ParseTreeWalker {
let lineEmitted = false;
// Emit the "import" statements.
this._trackedImportAs.forEach(imp => {
this._trackedImportAs.forEach((imp) => {
if (this._accessedImportedSymbols.get(imp.alias || imp.importName)) {
imp.isAccessed = true;
}
@ -609,8 +609,8 @@ export class TypeStubWriter extends ParseTreeWalker {
});
// Emit the "import from" statements.
this._trackedImportFrom.forEach(imp => {
imp.symbols.forEach(s => {
this._trackedImportFrom.forEach((imp) => {
imp.symbols.forEach((s) => {
if (this._accessedImportedSymbols.get(s.alias || s.name)) {
s.isAccessed = true;
}
@ -622,7 +622,7 @@ export class TypeStubWriter extends ParseTreeWalker {
}
const sortedSymbols = imp.symbols
.filter(s => s.isAccessed || this._includeAllImports)
.filter((s) => s.isAccessed || this._includeAllImports)
.sort((a, b) => {
if (a.name < b.name) {
return -1;
@ -636,7 +636,7 @@ export class TypeStubWriter extends ParseTreeWalker {
importStr += `from ${imp.importName} import `;
importStr += sortedSymbols
.map(symbol => {
.map((symbol) => {
let symStr = symbol.name;
if (symbol.alias) {
symStr += ' as ' + symbol.alias;

View File

@ -28,7 +28,7 @@ import {
Type,
TypeCategory,
TypeVarType,
UnknownType
UnknownType,
} from './types';
import { TypeVarMap } from './typeVarMap';
@ -66,7 +66,7 @@ export const enum ClassMemberLookupFlags {
// By default, the first symbol is returned even if it has only
// an inferred type associated with it. If this flag is set,
// the search looks only for symbols with declared types.
DeclaredTypesOnly = 1 << 4
DeclaredTypesOnly = 1 << 4,
}
export const enum CanAssignFlags {
@ -79,7 +79,7 @@ export const enum CanAssignFlags {
// The caller has swapped the source and dest types because
// the types are contravariant. Perform type var matching
// on dest type vars rather than source type var.
ReverseTypeVarMatching = 1 << 1
ReverseTypeVarMatching = 1 << 1,
}
export interface TypedDictEntry {
@ -90,7 +90,7 @@ export interface TypedDictEntry {
export function isOptionalType(type: Type): boolean {
if (type.category === TypeCategory.Union) {
return type.subtypes.some(t => isNoneOrNever(t));
return type.subtypes.some((t) => isNoneOrNever(t));
}
return false;
@ -102,7 +102,7 @@ export function doForSubtypes(type: Type, callback: (type: Type) => Type | undef
if (type.category === TypeCategory.Union) {
const newTypes: Type[] = [];
type.subtypes.forEach(typeEntry => {
type.subtypes.forEach((typeEntry) => {
const transformedType = callback(typeEntry);
if (transformedType) {
newTypes.push(transformedType);
@ -140,7 +140,7 @@ export function stripLiteralValue(type: Type): Type {
}
if (type.category === TypeCategory.Union) {
return doForSubtypes(type, subtype => {
return doForSubtypes(type, (subtype) => {
return stripLiteralValue(subtype);
});
}
@ -155,7 +155,7 @@ export function stripLiteralTypeArgsValue(type: Type, recursionCount = 0): Type
if (type.category === TypeCategory.Class) {
if (type.typeArguments) {
const strippedTypeArgs = type.typeArguments.map(t =>
const strippedTypeArgs = type.typeArguments.map((t) =>
stripLiteralTypeArgsValue(stripLiteralValue(t), recursionCount + 1)
);
return ClassType.cloneForSpecialization(type, strippedTypeArgs, type.skipAbstractClassTest);
@ -171,7 +171,7 @@ export function stripLiteralTypeArgsValue(type: Type, recursionCount = 0): Type
}
if (type.category === TypeCategory.Union) {
return doForSubtypes(type, subtype => {
return doForSubtypes(type, (subtype) => {
return stripLiteralTypeArgsValue(subtype, recursionCount + 1);
});
}
@ -179,12 +179,12 @@ export function stripLiteralTypeArgsValue(type: Type, recursionCount = 0): Type
if (type.category === TypeCategory.Function) {
if (type.specializedTypes) {
const strippedSpecializedTypes: SpecializedFunctionTypes = {
parameterTypes: type.specializedTypes.parameterTypes.map(t =>
parameterTypes: type.specializedTypes.parameterTypes.map((t) =>
stripLiteralTypeArgsValue(stripLiteralValue(t), recursionCount + 1)
),
returnType: type.specializedTypes.returnType
? stripLiteralTypeArgsValue(stripLiteralValue(type.specializedTypes.returnType), recursionCount + 1)
: undefined
: undefined,
};
type = FunctionType.cloneForSpecialization(type, strippedSpecializedTypes);
}
@ -195,7 +195,7 @@ export function stripLiteralTypeArgsValue(type: Type, recursionCount = 0): Type
if (type.category === TypeCategory.OverloadedFunction) {
const strippedOverload = OverloadedFunctionType.create();
strippedOverload.overloads = type.overloads.map(
t => stripLiteralTypeArgsValue(t, recursionCount + 1) as FunctionType
(t) => stripLiteralTypeArgsValue(t, recursionCount + 1) as FunctionType
);
return strippedOverload;
}
@ -388,7 +388,7 @@ export function specializeType(
if (type.category === TypeCategory.Union) {
const subtypes: Type[] = [];
type.subtypes.forEach(typeEntry => {
type.subtypes.forEach((typeEntry) => {
subtypes.push(specializeType(typeEntry, typeVarMap, makeConcrete, recursionLevel + 1));
});
@ -503,7 +503,7 @@ export function lookUpClassMember(
return {
symbol,
isInstanceMember: true,
classType: specializedMroClass
classType: specializedMroClass,
};
}
}
@ -533,7 +533,7 @@ export function lookUpClassMember(
return {
symbol,
isInstanceMember,
classType: specializedMroClass
classType: specializedMroClass,
};
}
}
@ -550,7 +550,7 @@ export function lookUpClassMember(
return {
symbol: Symbol.createWithType(SymbolFlags.None, UnknownType.create()),
isInstanceMember: false,
classType: UnknownType.create()
classType: UnknownType.create(),
};
}
} else if (isAnyOrUnknown(classType)) {
@ -559,7 +559,7 @@ export function lookUpClassMember(
return {
symbol: Symbol.createWithType(SymbolFlags.None, UnknownType.create()),
isInstanceMember: false,
classType: UnknownType.create()
classType: UnknownType.create(),
};
}
@ -570,12 +570,12 @@ export function addDefaultFunctionParameters(functionType: FunctionType) {
FunctionType.addParameter(functionType, {
category: ParameterCategory.VarArgList,
name: 'args',
type: AnyType.create()
type: AnyType.create(),
});
FunctionType.addParameter(functionType, {
category: ParameterCategory.VarArgDictionary,
name: 'kwargs',
type: AnyType.create()
type: AnyType.create(),
});
}
@ -608,7 +608,7 @@ export function getMetaclass(type: ClassType, recursionCount = 0): ClassType | U
// but removing any duplicates.
export function addTypeVarsToListIfUnique(list1: TypeVarType[], list2: TypeVarType[]) {
for (const type2 of list2) {
if (!list1.find(type1 => type1 === type2)) {
if (!list1.find((type1) => type1 === type2)) {
list1.push(type2);
}
}
@ -623,7 +623,7 @@ export function getTypeVarArgumentsRecursive(type: Type): TypeVarType[] {
const getTypeVarsFromClass = (classType: ClassType) => {
const combinedList: TypeVarType[] = [];
if (classType.typeArguments) {
classType.typeArguments.forEach(typeArg => {
classType.typeArguments.forEach((typeArg) => {
addTypeVarsToListIfUnique(combinedList, getTypeVarArgumentsRecursive(typeArg));
});
}
@ -646,7 +646,7 @@ export function getTypeVarArgumentsRecursive(type: Type): TypeVarType[] {
} else if (type.category === TypeCategory.Function) {
const combinedList: TypeVarType[] = [];
type.details.parameters.forEach(param => {
type.details.parameters.forEach((param) => {
addTypeVarsToListIfUnique(combinedList, getTypeVarArgumentsRecursive(param.type));
});
@ -689,14 +689,14 @@ export function setTypeArgumentsRecursive(destType: Type, srcType: Type, typeVar
switch (destType.category) {
case TypeCategory.Union:
destType.subtypes.forEach(subtype => {
destType.subtypes.forEach((subtype) => {
setTypeArgumentsRecursive(subtype, srcType, typeVarMap, recursionCount + 1);
});
break;
case TypeCategory.Class:
if (destType.typeArguments) {
destType.typeArguments.forEach(typeArg => {
destType.typeArguments.forEach((typeArg) => {
setTypeArgumentsRecursive(typeArg, srcType, typeVarMap, recursionCount + 1);
});
}
@ -708,7 +708,7 @@ export function setTypeArgumentsRecursive(destType: Type, srcType: Type, typeVar
case TypeCategory.Function:
if (destType.specializedTypes) {
destType.specializedTypes.parameterTypes.forEach(paramType => {
destType.specializedTypes.parameterTypes.forEach((paramType) => {
setTypeArgumentsRecursive(paramType, srcType, typeVarMap, recursionCount + 1);
});
if (destType.specializedTypes.returnType) {
@ -720,7 +720,7 @@ export function setTypeArgumentsRecursive(destType: Type, srcType: Type, typeVar
);
}
} else {
destType.details.parameters.forEach(param => {
destType.details.parameters.forEach((param) => {
setTypeArgumentsRecursive(param.type, srcType, typeVarMap, recursionCount + 1);
});
if (destType.details.declaredReturnType) {
@ -735,7 +735,7 @@ export function setTypeArgumentsRecursive(destType: Type, srcType: Type, typeVar
break;
case TypeCategory.OverloadedFunction:
destType.overloads.forEach(subtype => {
destType.overloads.forEach((subtype) => {
setTypeArgumentsRecursive(subtype, srcType, typeVarMap, recursionCount + 1);
});
break;
@ -821,7 +821,7 @@ export function derivesFromClassRecursive(classType: ClassType, baseClassToFind:
// and an "int", this method would strip off the "None"
// and return only the "int".
export function removeFalsinessFromType(type: Type): Type {
return doForSubtypes(type, subtype => {
return doForSubtypes(type, (subtype) => {
if (subtype.category === TypeCategory.Object) {
if (subtype.literalValue !== undefined) {
// If the object is already definitely truthy, it's fine to
@ -851,7 +851,7 @@ export function removeFalsinessFromType(type: Type): Type {
// method, this method would strip off the "Foo"
// and return only the "None".
export function removeTruthinessFromType(type: Type): Type {
return doForSubtypes(type, subtype => {
return doForSubtypes(type, (subtype) => {
if (subtype.category === TypeCategory.Object) {
if (subtype.literalValue !== undefined) {
// If the object is already definitely falsy, it's fine to
@ -931,7 +931,7 @@ export function getDeclaredGeneratorReturnType(functionType: FunctionType): Type
}
export function convertClassToObject(type: Type): Type {
return doForSubtypes(type, subtype => {
return doForSubtypes(type, (subtype) => {
if (subtype.category === TypeCategory.Class) {
return ObjectType.create(subtype);
}
@ -1014,7 +1014,7 @@ export function containsUnknown(type: Type, allowUnknownTypeArgsForClasses = fal
// See if a function has an unknown type.
if (type.category === TypeCategory.OverloadedFunction) {
return type.overloads.some(overload => {
return type.overloads.some((overload) => {
return containsUnknown(overload, false, recursionCount + 1);
});
}
@ -1052,7 +1052,7 @@ function _specializeClassType(
// If type args were previously provided, specialize them.
if (classType.typeArguments) {
newTypeArgs = classType.typeArguments.map(oldTypeArgType => {
newTypeArgs = classType.typeArguments.map((oldTypeArgType) => {
const newTypeArgType = specializeType(oldTypeArgType, typeVarMap, makeConcrete, recursionLevel + 1);
if (newTypeArgType !== oldTypeArgType) {
specializationNeeded = true;
@ -1060,7 +1060,7 @@ function _specializeClassType(
return newTypeArgType;
});
} else {
ClassType.getTypeParameters(classType).forEach(typeParam => {
ClassType.getTypeParameters(classType).forEach((typeParam) => {
let typeArgType: Type;
if (typeVarMap && typeVarMap.get(typeParam.name)) {
@ -1111,13 +1111,13 @@ function _specializeOverloadedFunctionType(
recursionLevel: number
): OverloadedFunctionType {
// Specialize each of the functions in the overload.
const overloads = type.overloads.map(entry =>
const overloads = type.overloads.map((entry) =>
_specializeFunctionType(entry, typeVarMap, makeConcrete, recursionLevel)
);
// Construct a new overload with the specialized function types.
const newOverloadType = OverloadedFunctionType.create();
overloads.forEach(overload => {
overloads.forEach((overload) => {
OverloadedFunctionType.addOverload(newOverloadType, overload);
});
@ -1141,7 +1141,7 @@ function _specializeFunctionType(
const specializedParameters: SpecializedFunctionTypes = {
parameterTypes: [],
returnType: specializedReturnType
returnType: specializedReturnType,
};
for (let i = 0; i < functionType.details.parameters.length; i++) {
@ -1190,7 +1190,7 @@ export function requiresSpecialization(type: Type, recursionCount = 0): boolean
}
return (
type.typeArguments.find(typeArg => requiresSpecialization(typeArg, recursionCount + 1)) !==
type.typeArguments.find((typeArg) => requiresSpecialization(typeArg, recursionCount + 1)) !==
undefined
);
}
@ -1233,11 +1233,13 @@ export function requiresSpecialization(type: Type, recursionCount = 0): boolean
}
case TypeCategory.OverloadedFunction: {
return type.overloads.find(overload => requiresSpecialization(overload, recursionCount + 1)) !== undefined;
return (
type.overloads.find((overload) => requiresSpecialization(overload, recursionCount + 1)) !== undefined
);
}
case TypeCategory.Union: {
return type.subtypes.find(type => requiresSpecialization(type, recursionCount + 1)) !== undefined;
return type.subtypes.find((type) => requiresSpecialization(type, recursionCount + 1)) !== undefined;
}
case TypeCategory.TypeVar: {
@ -1258,11 +1260,11 @@ export function computeMroLinearization(classType: ClassType): boolean {
// Construct the list of class lists that need to be merged.
const classListsToMerge: Type[][] = [];
classType.details.baseClasses.forEach(baseClass => {
classType.details.baseClasses.forEach((baseClass) => {
if (baseClass.category === TypeCategory.Class) {
const typeVarMap = buildTypeVarMapFromSpecializedClass(baseClass, false);
classListsToMerge.push(
baseClass.details.mro.map(mroClass => {
baseClass.details.mro.map((mroClass) => {
return specializeType(mroClass, typeVarMap);
})
);
@ -1272,7 +1274,7 @@ export function computeMroLinearization(classType: ClassType): boolean {
});
classListsToMerge.push(
classType.details.baseClasses.map(baseClass => {
classType.details.baseClasses.map((baseClass) => {
const typeVarMap = buildTypeVarMapFromSpecializedClass(classType, false);
return specializeType(baseClass, typeVarMap);
})
@ -1286,10 +1288,10 @@ export function computeMroLinearization(classType: ClassType): boolean {
// is found in the "tail" (i.e. in elements 1 through n) of any
// of the class lists.
const isInTail = (searchClass: ClassType, classLists: Type[][]) => {
return classLists.some(classList => {
return classLists.some((classList) => {
return (
classList.findIndex(
value =>
(value) =>
value.category === TypeCategory.Class && ClassType.isSameGenericClass(value, searchClass, false)
) > 0
);
@ -1299,7 +1301,7 @@ export function computeMroLinearization(classType: ClassType): boolean {
const filterClass = (classToFilter: ClassType, classLists: Type[][]) => {
for (let i = 0; i < classLists.length; i++) {
classLists[i] = classLists[i].filter(
value =>
(value) =>
value.category !== TypeCategory.Class || !ClassType.isSameGenericClass(value, classToFilter, false)
);
}

View File

@ -53,7 +53,7 @@ export const enum TypeCategory {
Union,
// Type variable (defined with TypeVar)
TypeVar
TypeVar,
}
export type Type =
@ -87,7 +87,7 @@ export interface UnboundType extends TypeBase {
export namespace UnboundType {
const _instance: UnboundType = {
category: TypeCategory.Unbound
category: TypeCategory.Unbound,
};
export function create() {
@ -102,7 +102,7 @@ export interface UnknownType extends TypeBase {
export namespace UnknownType {
const _instance: UnknownType = {
category: TypeCategory.Unknown
category: TypeCategory.Unknown,
};
export function create() {
@ -127,7 +127,7 @@ export namespace ModuleType {
const newModuleType: ModuleType = {
category: TypeCategory.Module,
fields: symbolTable || new Map<string, Symbol>(),
loaderFields: new Map<string, Symbol>()
loaderFields: new Map<string, Symbol>(),
};
return newModuleType;
}
@ -208,7 +208,7 @@ export const enum ClassTypeFlags {
// A class whose constructor (__init__ method) does not have
// annotated types and is treated as though each parameter
// is a generic type for purposes of type inference.
PseudoGenericClass = 1 << 12
PseudoGenericClass = 1 << 12,
}
interface ClassDetails {
@ -251,9 +251,9 @@ export namespace ClassType {
mro: [],
fields: new Map<string, Symbol>(),
typeParameters: [],
docString
docString,
},
skipAbstractClassTest: false
skipAbstractClassTest: false,
};
return newClass;
@ -530,7 +530,7 @@ export namespace ObjectType {
export function create(classType: ClassType) {
const newObjectType: ObjectType = {
category: TypeCategory.Object,
classType
classType,
};
return newObjectType;
}
@ -601,7 +601,7 @@ export const enum FunctionTypeFlags {
Final = 1 << 12,
// Function has one or more parameters that are missing type annotations
UnannotatedParams = 1 << 13
UnannotatedParams = 1 << 13,
}
interface FunctionDetails {
@ -642,8 +642,8 @@ export namespace FunctionType {
details: {
flags,
parameters: [],
docString
}
docString,
},
};
return newFunctionType;
}
@ -660,7 +660,7 @@ export namespace FunctionType {
declaredReturnType: type.details.declaredReturnType,
declaration: type.details.declaration,
builtInName: type.details.builtInName,
docString: type.details.docString
docString: type.details.docString,
};
// If we strip off the first parameter, this is no longer an
@ -674,7 +674,7 @@ export namespace FunctionType {
if (type.specializedTypes) {
newFunction.specializedTypes = {
parameterTypes: type.specializedTypes.parameterTypes.slice(startParam),
returnType: type.specializedTypes.returnType
returnType: type.specializedTypes.returnType,
};
}
@ -794,7 +794,7 @@ export namespace OverloadedFunctionType {
export function create() {
const newType: OverloadedFunctionType = {
category: TypeCategory.OverloadedFunction,
overloads: []
overloads: [],
};
return newType;
}
@ -810,7 +810,7 @@ export interface NoneType extends TypeBase {
export namespace NoneType {
const _noneInstance: NoneType = {
category: TypeCategory.None
category: TypeCategory.None,
};
export function create() {
@ -824,7 +824,7 @@ export interface NeverType extends TypeBase {
export namespace NeverType {
const _neverInstance: NeverType = {
category: TypeCategory.Never
category: TypeCategory.Never,
};
export function create() {
@ -840,11 +840,11 @@ export interface AnyType extends TypeBase {
export namespace AnyType {
const _anyInstance: AnyType = {
category: TypeCategory.Any,
isEllipsis: false
isEllipsis: false,
};
const _ellipsisInstance: AnyType = {
category: TypeCategory.Any,
isEllipsis: true
isEllipsis: true,
};
export function create(isEllipsis = false) {
@ -861,7 +861,7 @@ export namespace UnionType {
export function create() {
const newUnionType: UnionType = {
category: TypeCategory.Union,
subtypes: []
subtypes: [],
};
return newUnionType;
@ -876,7 +876,7 @@ export namespace UnionType {
}
export function containsType(unionType: UnionType, subtype: Type, recursionCount = 0): boolean {
return unionType.subtypes.find(t => isTypeSame(t, subtype, recursionCount + 1)) !== undefined;
return unionType.subtypes.find((t) => isTypeSame(t, subtype, recursionCount + 1)) !== undefined;
}
}
@ -901,7 +901,7 @@ export namespace TypeVarType {
constraints: [],
isCovariant: false,
isContravariant: false,
isSynthesized
isSynthesized,
};
return newTypeVarType;
}
@ -921,7 +921,7 @@ export function isAnyOrUnknown(type: Type): boolean {
}
if (type.category === TypeCategory.Union) {
return type.subtypes.find(t => !isAnyOrUnknown(t)) === undefined;
return type.subtypes.find((t) => !isAnyOrUnknown(t)) === undefined;
}
return false;
@ -937,7 +937,7 @@ export function isPossiblyUnbound(type: Type): boolean {
}
if (type.category === TypeCategory.Union) {
return type.subtypes.find(t => isPossiblyUnbound(t)) !== undefined;
return type.subtypes.find((t) => isPossiblyUnbound(t)) !== undefined;
}
return false;
@ -1070,7 +1070,7 @@ export function isTypeSame(type1: Type, type2: Type, recursionCount = 0): boolea
// The types do not have a particular order, so we need to
// do the comparison in an order-independent manner.
return subtypes1.find(t => !UnionType.containsType(unionType2, t, recursionCount + 1)) === undefined;
return subtypes1.find((t) => !UnionType.containsType(unionType2, t, recursionCount + 1)) === undefined;
}
case TypeCategory.TypeVar: {
@ -1163,7 +1163,7 @@ export function removeNoneFromUnion(type: Type): Type {
export function removeFromUnion(type: Type, removeFilter: (type: Type) => boolean) {
if (type.category === TypeCategory.Union) {
const remainingTypes = type.subtypes.filter(t => !removeFilter(t));
const remainingTypes = type.subtypes.filter((t) => !removeFilter(t));
if (remainingTypes.length < type.subtypes.length) {
return combineTypes(remainingTypes);
}
@ -1178,7 +1178,7 @@ export function removeFromUnion(type: Type, removeFilter: (type: Type) => boolea
// If no types remain in the end, a NeverType is returned.
export function combineTypes(types: Type[]): Type {
// Filter out any "Never" types.
types = types.filter(type => type.category !== TypeCategory.Never);
types = types.filter((type) => type.category !== TypeCategory.Never);
if (types.length === 0) {
return NeverType.create();
}

View File

@ -11,5 +11,5 @@ export const enum Commands {
createTypeStub = 'pyright.createtypestub',
restartServer = 'pyright.restartserver',
orderImports = 'pyright.organizeimports',
addMissingOptionalToParam = 'pyright.addoptionalforparam'
addMissingOptionalToParam = 'pyright.addoptionalforparam',
}

View File

@ -30,10 +30,10 @@ export class CreateTypeStubCommand implements ServerCommand {
rootPath: workspaceRoot,
rootUri: convertPathToUri(workspaceRoot),
serviceInstance: service,
disableLanguageServices: true
disableLanguageServices: true,
};
service.setCompletionCallback(results => {
service.setCompletionCallback((results) => {
if (results.filesRequiringAnalysis === 0) {
try {
service.writeTypeStub(token);
@ -72,7 +72,7 @@ export class CreateTypeStubCommand implements ServerCommand {
service.setMaxAnalysisDuration({
openFilesTimeInMs: 500,
noOpenFilesTimeInMs: 500
noOpenFilesTimeInMs: 500,
});
return service;

View File

@ -30,10 +30,10 @@ export class QuickActionCommand implements ServerCommand {
}
const edits: TextEdit[] = [];
editActions.forEach(editAction => {
editActions.forEach((editAction) => {
edits.push({
range: editAction.range,
newText: editAction.replacementText
newText: editAction.replacementText,
});
});

View File

@ -19,7 +19,7 @@ import {
Emitter,
ErrorCodes,
Event,
ResponseError
ResponseError,
} from 'vscode-languageserver';
class CancellationThrottle {

View File

@ -166,7 +166,7 @@ function indicesOf(array: readonly unknown[]): number[] {
export function stableSort<T>(array: readonly T[], comparer: Comparer<T>): SortedReadonlyArray<T> {
const indices = indicesOf(array);
stableSortIndices(array, indices, comparer);
return (indices.map(i => array[i]) as SortedArray<T>) as SortedReadonlyArray<T>;
return (indices.map((i) => array[i]) as SortedArray<T>) as SortedReadonlyArray<T>;
}
function stableSortIndices<T>(array: readonly T[], indices: number[], comparer: Comparer<T>) {

View File

@ -176,7 +176,7 @@ export function getBooleanDiagnosticSettings() {
return [
DiagnosticRule.strictListInference,
DiagnosticRule.strictDictionaryInference,
DiagnosticRule.strictParameterNoneValue
DiagnosticRule.strictParameterNoneValue,
// Do not include this one because we don't
// want to override it in strict mode or support
@ -221,7 +221,7 @@ export function getDiagLevelSettings() {
DiagnosticRule.reportUnnecessaryCast,
DiagnosticRule.reportAssertAlwaysTrue,
DiagnosticRule.reportSelfClsParameterName,
DiagnosticRule.reportImplicitStringConcatenation
DiagnosticRule.reportImplicitStringConcatenation,
];
}
@ -265,7 +265,7 @@ export function getStrictDiagnosticSettings(): DiagnosticSettings {
reportUnnecessaryCast: 'error',
reportAssertAlwaysTrue: 'error',
reportSelfClsParameterName: 'error',
reportImplicitStringConcatenation: 'none'
reportImplicitStringConcatenation: 'none',
};
return diagSettings;
@ -311,7 +311,7 @@ export function getDefaultDiagnosticSettings(): DiagnosticSettings {
reportUnnecessaryCast: 'none',
reportAssertAlwaysTrue: 'warning',
reportSelfClsParameterName: 'warning',
reportImplicitStringConcatenation: 'none'
reportImplicitStringConcatenation: 'none',
};
return diagSettings;
@ -411,7 +411,7 @@ export class ConfigOptions {
// If no matching execution environment can be found, a default
// execution environment is used.
findExecEnvironment(filePath: string): ExecutionEnvironment {
let execEnv = this.executionEnvironments.find(env => {
let execEnv = this.executionEnvironments.find((env) => {
const envRoot = ensureTrailingDirectorySeparator(normalizePath(combinePaths(this.projectRoot, env.root)));
return filePath.startsWith(envRoot);
});
@ -792,7 +792,7 @@ export class ConfigOptions {
configObj.reportImplicitStringConcatenation,
DiagnosticRule.reportImplicitStringConcatenation,
defaultSettings.reportImplicitStringConcatenation
)
),
};
// Read the "venvPath".

View File

@ -11,7 +11,7 @@
export const enum Comparison {
LessThan = -1,
EqualTo = 0,
GreaterThan = 1
GreaterThan = 1,
}
/**

View File

@ -13,7 +13,7 @@ import { Range } from './textRange';
export const enum DiagnosticCategory {
Error,
Warning,
UnusedCode
UnusedCode,
}
export interface DiagnosticAction {
@ -127,6 +127,6 @@ export class DiagnosticAddendum {
// Prepend indentation for readability. Skip if there are no
// messages at this level.
const extraSpace = this._messages.length > 0 ? ' ' : '';
return this._messages.concat(childLines).map(line => extraSpace + line);
return this._messages.concat(childLines).map((line) => extraSpace + line);
}
}

View File

@ -48,5 +48,5 @@ export const enum DiagnosticRule {
reportUnnecessaryCast = 'reportUnnecessaryCast',
reportAssertAlwaysTrue = 'reportAssertAlwaysTrue',
reportSelfClsParameterName = 'reportSelfClsParameterName',
reportImplicitStringConcatenation = 'reportImplicitStringConcatenation'
reportImplicitStringConcatenation = 'reportImplicitStringConcatenation',
}

View File

@ -65,11 +65,11 @@ export class DiagnosticSink {
}
getErrors() {
return this._diagnosticList.filter(diag => diag.category === DiagnosticCategory.Error);
return this._diagnosticList.filter((diag) => diag.category === DiagnosticCategory.Error);
}
getWarnings() {
return this._diagnosticList.filter(diag => diag.category === DiagnosticCategory.Warning);
return this._diagnosticList.filter((diag) => diag.category === DiagnosticCategory.Warning);
}
}

View File

@ -10,6 +10,6 @@
/* eslint-disable @typescript-eslint/no-empty-function */
// Explicitly tells that promise should be run asynchronously.
Promise.prototype.ignoreErrors = function<T>(this: Promise<T>) {
Promise.prototype.ignoreErrors = function <T>(this: Promise<T>) {
this.catch(() => {});
};

View File

@ -70,7 +70,7 @@ class LowLevelWatcher implements FileWatcher {
constructor(private paths: string[]) {}
close(): void {
this.paths.forEach(p => fs.unwatchFile(p));
this.paths.forEach((p) => fs.unwatchFile(p));
}
}
@ -126,7 +126,7 @@ class RealFileSystem implements FileSystem {
recursive?: boolean,
listener?: (event: string, filename: string) => void
): FileWatcher {
paths.forEach(p => {
paths.forEach((p) => {
fs.watch(p, { recursive: recursive }, listener);
});
@ -146,7 +146,7 @@ class RealFileSystem implements FileSystem {
followSymlinks: true, // this is the default of chokidar and supports file events through symlinks
interval: 1000, // while not used in normal cases, if any error causes chokidar to fallback to polling, increase its intervals
binaryInterval: 1000,
disableGlobbing: true // fix https://github.com/Microsoft/vscode/issues/4586
disableGlobbing: true, // fix https://github.com/Microsoft/vscode/issues/4586
};
if (_isMacintosh) {
@ -157,7 +157,7 @@ class RealFileSystem implements FileSystem {
const excludes: string[] = [];
if (_isMacintosh || _isLinux) {
if (paths.some(path => path === '' || path === '/')) {
if (paths.some((path) => path === '' || path === '/')) {
excludes.push('/dev/**');
if (_isLinux) {
excludes.push('/proc/**', '/sys/**');
@ -167,7 +167,7 @@ class RealFileSystem implements FileSystem {
watcherOptions.ignored = excludes;
const watcher = chokidar.watch(paths, watcherOptions);
watcher.on('error', _ => {
watcher.on('error', (_) => {
this._console.log('Error returned from file system watcher.');
});

View File

@ -20,7 +20,7 @@ import {
compareStringsCaseSensitive,
equateStringsCaseInsensitive,
equateStringsCaseSensitive,
getStringComparer
getStringComparer,
} from './stringUtils';
export interface FileSpec {
@ -601,7 +601,7 @@ export function getWildcardRegexPattern(rootPath: string, fileSpec: string): str
component = escapedSeparator + component;
}
regExPattern += component.replace(reservedCharacterPattern, match => {
regExPattern += component.replace(reservedCharacterPattern, (match) => {
if (match === '*') {
return `[^${escapedSeparator}]*`;
} else if (match === '?') {
@ -666,7 +666,7 @@ export function getFileSpec(rootPath: string, fileSpec: string): FileSpec {
return {
wildcardRoot,
regExp
regExp,
};
}
@ -807,7 +807,7 @@ function getPathComponentsRelativeTo(
const enum FileSystemEntryKind {
File,
Directory
Directory,
}
function fileSystemEntryExists(fs: FileSystem, path: string, entryKind: FileSystemEntryKind): boolean {

View File

@ -18,7 +18,7 @@ export function convertOffsetToPosition(offset: number, lines: TextRangeCollecti
if (lines.end === 0) {
return {
line: 0,
character: 0
character: 0,
};
}
@ -33,7 +33,7 @@ export function convertOffsetToPosition(offset: number, lines: TextRangeCollecti
assert(lineRange !== undefined);
return {
line: itemIndex,
character: offset - lineRange.start
character: offset - lineRange.start,
};
}

View File

@ -24,7 +24,7 @@ export enum PythonVersion {
V35 = 0x0305,
V36 = 0x0306,
V37 = 0x0307,
V38 = 0x0308
V38 = 0x0308,
}
export const latestStablePythonVersion = PythonVersion.V38;

View File

@ -44,7 +44,7 @@ export namespace TextRange {
export function extend(range: TextRange, extension: TextRange | TextRange[] | undefined) {
if (extension) {
if (Array.isArray(extension)) {
extension.forEach(r => {
extension.forEach((r) => {
extend(range, r);
});
} else {
@ -94,7 +94,7 @@ export function comparePositions(a: Position, b: Position) {
export function getEmptyPosition(): Position {
return {
line: 0,
character: 0
character: 0,
};
}
@ -118,6 +118,6 @@ export function rangesAreEqual(a: Range, b: Range) {
export function getEmptyRange(): Range {
return {
start: getEmptyPosition(),
end: getEmptyPosition()
end: getEmptyPosition(),
};
}

View File

@ -34,7 +34,7 @@ import {
SymbolInformation,
TextDocumentSyncKind,
TextEdit,
WorkspaceEdit
WorkspaceEdit,
} from 'vscode-languageserver';
import { ImportResolver } from './analyzer/importResolver';
@ -137,7 +137,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
const scopeUri = workspace.rootUri ? workspace.rootUri : undefined;
const item: ConfigurationItem = {
scopeUri,
section
section,
};
return this._connection.workspace.getConfiguration(item);
}
@ -184,10 +184,10 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
// reporting results. This will keep it responsive.
service.setMaxAnalysisDuration({
openFilesTimeInMs: 50,
noOpenFilesTimeInMs: 200
noOpenFilesTimeInMs: 200,
});
service.setCompletionCallback(results => this.onAnalysisCompletedHandler(results));
service.setCompletionCallback((results) => this.onAnalysisCompletedHandler(results));
return service;
}
@ -197,13 +197,13 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
}
reanalyze() {
this._workspaceMap.forEach(workspace => {
this._workspaceMap.forEach((workspace) => {
workspace.serviceInstance.invalidateAndForceReanalysis();
});
}
restart() {
this._workspaceMap.forEach(workspace => {
this._workspaceMap.forEach((workspace) => {
workspace.serviceInstance.restart();
});
}
@ -221,14 +221,14 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
// Create a service instance for each of the workspace folders.
if (params.workspaceFolders) {
params.workspaceFolders.forEach(folder => {
params.workspaceFolders.forEach((folder) => {
const path = convertUriToPath(folder.uri);
this._workspaceMap.set(path, {
workspaceName: folder.name,
rootPath: path,
rootUri: folder.uri,
serviceInstance: this.createAnalyzerService(folder.name),
disableLanguageServices: false
disableLanguageServices: false,
});
});
} else if (params.rootPath) {
@ -237,7 +237,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
rootPath: params.rootPath,
rootUri: '',
serviceInstance: this.createAnalyzerService(params.rootPath),
disableLanguageServices: false
disableLanguageServices: false,
});
}
@ -254,20 +254,20 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
renameProvider: true,
completionProvider: {
triggerCharacters: ['.', '['],
resolveProvider: true
resolveProvider: true,
},
signatureHelpProvider: {
triggerCharacters: ['(', ',', ')']
triggerCharacters: ['(', ',', ')'],
},
codeActionProvider: {
codeActionKinds: [CodeActionKind.QuickFix, CodeActionKind.SourceOrganizeImports]
}
}
codeActionKinds: [CodeActionKind.QuickFix, CodeActionKind.SourceOrganizeImports],
},
},
};
}
);
this._connection.onDidChangeConfiguration(params => {
this._connection.onDidChangeConfiguration((params) => {
this._connection.console.log(`Received updated settings`);
if (params?.settings) {
this._defaultClientConfig = params?.settings;
@ -284,7 +284,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
const position: Position = {
line: params.position.line,
character: params.position.character
character: params.position.character,
};
const workspace = this._workspaceMap.getWorkspaceForFile(filePath);
@ -295,7 +295,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
if (!locations) {
return undefined;
}
return locations.map(loc => Location.create(convertPathToUri(loc.path), loc.range));
return locations.map((loc) => Location.create(convertPathToUri(loc.path), loc.range));
});
this._connection.onReferences((params, token) => {
@ -303,7 +303,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
const position: Position = {
line: params.position.line,
character: params.position.character
character: params.position.character,
};
const workspace = this._workspaceMap.getWorkspaceForFile(filePath);
@ -319,7 +319,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
if (!locations) {
return undefined;
}
return locations.map(loc => Location.create(convertPathToUri(loc.path), loc.range));
return locations.map((loc) => Location.create(convertPathToUri(loc.path), loc.range));
});
this._connection.onDocumentSymbol((params, token) => {
@ -340,7 +340,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
this._connection.onWorkspaceSymbol((params, token) => {
const symbolList: SymbolInformation[] = [];
this._workspaceMap.forEach(workspace => {
this._workspaceMap.forEach((workspace) => {
if (!workspace.disableLanguageServices) {
workspace.serviceInstance.addSymbolsForWorkspace(symbolList, params.query, token);
}
@ -354,7 +354,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
const position: Position = {
line: params.position.line,
character: params.position.character
character: params.position.character,
};
const workspace = this._workspaceMap.getWorkspaceForFile(filePath);
@ -367,7 +367,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
const position: Position = {
line: params.position.line,
character: params.position.character
character: params.position.character,
};
const workspace = this._workspaceMap.getWorkspaceForFile(filePath);
@ -384,10 +384,10 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
}
return {
signatures: signatureHelpResults.signatures.map(sig => {
signatures: signatureHelpResults.signatures.map((sig) => {
let paramInfo: ParameterInformation[] = [];
if (sig.parameters) {
paramInfo = sig.parameters.map(param => {
paramInfo = sig.parameters.map((param) => {
return ParameterInformation.create(
[param.startOffset, param.endOffset],
param.documentation
@ -399,7 +399,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
activeSignature:
signatureHelpResults.activeSignature !== undefined ? signatureHelpResults.activeSignature : null,
activeParameter:
signatureHelpResults.activeParameter !== undefined ? signatureHelpResults.activeParameter : null
signatureHelpResults.activeParameter !== undefined ? signatureHelpResults.activeParameter : null,
};
});
@ -408,7 +408,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
const position: Position = {
line: params.position.line,
character: params.position.character
character: params.position.character,
};
const workspace = this._workspaceMap.getWorkspaceForFile(filePath);
@ -450,7 +450,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
const position: Position = {
line: params.position.line,
character: params.position.character
character: params.position.character,
};
const workspace = this._workspaceMap.getWorkspaceForFile(filePath);
@ -469,9 +469,9 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
}
const edits: WorkspaceEdit = {
changes: {}
changes: {},
};
editActions.forEach(editAction => {
editActions.forEach((editAction) => {
const uri = convertPathToUri(editAction.filePath);
if (edits.changes![uri] === undefined) {
edits.changes![uri] = [];
@ -479,7 +479,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
const textEdit: TextEdit = {
range: editAction.range,
newText: editAction.replacementText
newText: editAction.replacementText,
};
edits.changes![uri].push(textEdit);
});
@ -487,13 +487,13 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
return edits;
});
this._connection.onDidOpenTextDocument(params => {
this._connection.onDidOpenTextDocument((params) => {
const filePath = convertUriToPath(params.textDocument.uri);
const service = this._workspaceMap.getWorkspaceForFile(filePath).serviceInstance;
service.setFileOpened(filePath, params.textDocument.version, params.textDocument.text);
});
this._connection.onDidChangeTextDocument(params => {
this._connection.onDidChangeTextDocument((params) => {
this.recordUserInteractionTime();
const filePath = convertUriToPath(params.textDocument.uri);
@ -501,27 +501,27 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
service.updateOpenFileContents(filePath, params.textDocument.version, params.contentChanges[0].text);
});
this._connection.onDidCloseTextDocument(params => {
this._connection.onDidCloseTextDocument((params) => {
const filePath = convertUriToPath(params.textDocument.uri);
const service = this._workspaceMap.getWorkspaceForFile(filePath).serviceInstance;
service.setFileClosed(filePath);
});
this._connection.onInitialized(() => {
this._connection.workspace.onDidChangeWorkspaceFolders(event => {
event.removed.forEach(workspace => {
this._connection.workspace.onDidChangeWorkspaceFolders((event) => {
event.removed.forEach((workspace) => {
const rootPath = convertUriToPath(workspace.uri);
this._workspaceMap.delete(rootPath);
});
event.added.forEach(async workspace => {
event.added.forEach(async (workspace) => {
const rootPath = convertUriToPath(workspace.uri);
const newWorkspace: WorkspaceServiceInstance = {
workspaceName: workspace.name,
rootPath,
rootUri: workspace.uri,
serviceInstance: this.createAnalyzerService(workspace.name),
disableLanguageServices: false
disableLanguageServices: false,
};
this._workspaceMap.set(rootPath, newWorkspace);
await this.updateSettingsForWorkspace(newWorkspace);
@ -533,19 +533,19 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
}
updateSettingsForAllWorkspaces(): void {
this._workspaceMap.forEach(workspace => {
this._workspaceMap.forEach((workspace) => {
this.updateSettingsForWorkspace(workspace).ignoreErrors();
});
}
protected onAnalysisCompletedHandler(results: AnalysisResults): void {
results.diagnostics.forEach(fileDiag => {
results.diagnostics.forEach((fileDiag) => {
const diagnostics = this._convertDiagnostics(fileDiag.diagnostics);
// Send the computed diagnostics to the client.
this._connection.sendDiagnostics({
uri: convertPathToUri(fileDiag.filePath),
diagnostics
diagnostics,
});
if (results.filesRequiringAnalysis > 0) {
@ -590,7 +590,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
}
private _convertDiagnostics(diags: AnalyzerDiagnostic[]): Diagnostic[] {
return diags.map(diag => {
return diags.map((diag) => {
const severity =
diag.category === DiagnosticCategory.Error ? DiagnosticSeverity.Error : DiagnosticSeverity.Warning;
@ -609,7 +609,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
const relatedInfo = diag.getRelatedInfo();
if (relatedInfo.length > 0) {
vsDiag.relatedInformation = relatedInfo.map(info => {
vsDiag.relatedInformation = relatedInfo.map((info) => {
return DiagnosticRelatedInformation.create(
Location.create(convertPathToUri(info.filePath), info.range),
info.message
@ -625,7 +625,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface {
// Tell all of the services that the user is actively
// interacting with one or more editors, so they should
// back off from performing any work.
this._workspaceMap.forEach(workspace => {
this._workspaceMap.forEach((workspace) => {
workspace.serviceInstance.recordUserInteractionTime();
});
}


@ -30,15 +30,15 @@ export class CodeActionProvider {
if (!workspace.disableLanguageServices) {
const diags = workspace.serviceInstance.getDiagnosticsForRange(filePath, range);
const typeStubDiag = diags.find(d => {
const typeStubDiag = diags.find((d) => {
const actions = d.getActions();
return actions && actions.find(a => a.action === Commands.createTypeStub);
return actions && actions.find((a) => a.action === Commands.createTypeStub);
});
if (typeStubDiag) {
const action = typeStubDiag
.getActions()!
.find(a => a.action === Commands.createTypeStub) as CreateTypeStubFileAction;
.find((a) => a.action === Commands.createTypeStub) as CreateTypeStubFileAction;
if (action) {
const createTypeStubAction = CodeAction.create(
`Create Type Stub For "${action.moduleName}"`,
@ -55,15 +55,15 @@ export class CodeActionProvider {
}
}
const addOptionalDiag = diags.find(d => {
const addOptionalDiag = diags.find((d) => {
const actions = d.getActions();
return actions && actions.find(a => a.action === Commands.addMissingOptionalToParam);
return actions && actions.find((a) => a.action === Commands.addMissingOptionalToParam);
});
if (addOptionalDiag) {
const action = addOptionalDiag
.getActions()!
.find(a => a.action === Commands.addMissingOptionalToParam) as AddMissingOptionalToParamAction;
.find((a) => a.action === Commands.addMissingOptionalToParam) as AddMissingOptionalToParamAction;
if (action) {
const addMissingOptionalAction = CodeAction.create(
`Add "Optional" to type annotation`,


@ -15,7 +15,7 @@ import {
CompletionList,
MarkupKind,
Range,
TextEdit
TextEdit,
} from 'vscode-languageserver';
import { ImportLookup } from '../analyzer/analyzerFileInfo';
@ -52,7 +52,7 @@ import {
ParameterCategory,
ParseNode,
ParseNodeType,
StringNode
StringNode,
} from '../parser/parseNodes';
import { ParseResults } from '../parser/parser';
@ -93,7 +93,7 @@ const _keywords: string[] = [
'return',
'try',
'while',
'yield'
'yield',
];
enum SortCategory {
@ -135,7 +135,7 @@ enum SortCategory {
DunderSymbol,
// An auto-import symbol.
AutoImport
AutoImport,
}
// Completion items can have arbitrary data hanging off them.
@ -330,7 +330,7 @@ export class CompletionProvider {
}
const curIndex = CompletionProvider._mostRecentCompletions.findIndex(
item => item.label === label && item.autoImportText === autoImportText
(item) => item.label === label && item.autoImportText === autoImportText
);
if (curIndex > 0) {
@ -459,7 +459,7 @@ export class CompletionProvider {
if (isSimilar) {
const range: Range = {
start: { line: this._position.line, character: this._position.character - partialName.length },
end: { line: this._position.line, character: this._position.character }
end: { line: this._position.line, character: this._position.character },
};
const methodSignature = this._printMethodSignature(decl.node) + ':';
@ -474,7 +474,7 @@ export class CompletionProvider {
private _printMethodSignature(node: FunctionNode): string {
const paramList = node.parameters
.map(param => {
.map((param) => {
let paramString = '';
if (param.category === ParameterCategory.VarArgList) {
paramString += '*';
@ -508,7 +508,7 @@ export class CompletionProvider {
const symbolTable = new Map<string, Symbol>();
if (leftType) {
doForSubtypes(leftType, subtype => {
doForSubtypes(leftType, (subtype) => {
if (subtype.category === TypeCategory.Object) {
getMembersForClass(subtype.classType, symbolTable, true);
} else if (subtype.category === TypeCategory.Class) {
@ -522,7 +522,7 @@ export class CompletionProvider {
}
const completionList = CompletionList.create();
this._addSymbolsForSymbolTable(symbolTable, _ => true, priorWord, completionList);
this._addSymbolsForSymbolTable(symbolTable, (_) => true, priorWord, completionList);
return completionList;
}
@ -558,7 +558,7 @@ export class CompletionProvider {
this._addSymbols(parseNode, priorWord, completionList);
// Add keywords.
this._findMatchingKeywords(_keywords, priorWord).map(keyword => {
this._findMatchingKeywords(_keywords, priorWord).map((keyword) => {
const completionItem = CompletionItem.create(keyword);
completionItem.kind = CompletionItemKind.Keyword;
completionList.items.push(completionItem);
@ -628,7 +628,7 @@ export class CompletionProvider {
postText: string,
completionList: CompletionList
) {
signatureInfo.signatures.forEach(signature => {
signatureInfo.signatures.forEach((signature) => {
if (!signature.activeParam) {
return undefined;
}
@ -653,7 +653,7 @@ export class CompletionProvider {
completionList: CompletionList
) {
const quoteValue = this._getQuoteValueFromPriorText(priorText);
doForSubtypes(type, subtype => {
doForSubtypes(type, (subtype) => {
if (subtype.category === TypeCategory.Object) {
if (ClassType.isBuiltIn(subtype.classType, 'str')) {
if (subtype.literalValue !== undefined) {
@ -817,7 +817,7 @@ export class CompletionProvider {
const range: Range = {
start: { line: this._position.line, character: rangeStartCol },
end: { line: this._position.line, character: rangeEndCol }
end: { line: this._position.line, character: rangeEndCol },
};
completionItem.textEdit = TextEdit.replace(range, valueWithQuotes);
@ -850,7 +850,7 @@ export class CompletionProvider {
// this name, don't add an auto-import suggestion with
// the same name.
const localDuplicate = completionList.items.find(
item => item.label === name && !item.data.autoImport
(item) => item.label === name && !item.data.autoImport
);
const declarations = symbol.getDeclarations();
if (declarations && declarations.length > 0 && localDuplicate === undefined) {
@ -904,7 +904,7 @@ export class CompletionProvider {
if (moduleNameAndType.moduleName) {
const autoImportText = `Auto-import from ${moduleNameAndType.moduleName}`;
const isDuplicateEntry = completionList.items.find(item => {
const isDuplicateEntry = completionList.items.find((item) => {
if (item.label === name) {
// Don't add if there's already a local completion suggestion.
if (!item.data.autoImport) {
@ -1002,9 +1002,9 @@ export class CompletionProvider {
if (lookupResults) {
this._addSymbolsForSymbolTable(
lookupResults.symbolTable,
name => {
(name) => {
// Don't suggest symbols that have already been imported.
return !importFromNode.imports.find(imp => imp.name.value === name);
return !importFromNode.imports.find((imp) => imp.name.value === name);
},
priorWord,
completionList
@ -1012,8 +1012,8 @@ export class CompletionProvider {
}
// Add the implicit imports.
importInfo.implicitImports.forEach(implImport => {
if (!importFromNode.imports.find(imp => imp.name.value === implImport.name)) {
importInfo.implicitImports.forEach((implImport) => {
if (!importFromNode.imports.find((imp) => imp.name.value === implImport.name)) {
this._addNameToCompletionList(implImport.name, CompletionItemKind.Module, priorWord, completionList);
}
});
@ -1022,7 +1022,7 @@ export class CompletionProvider {
}
private _findMatchingKeywords(keywordList: string[], partialMatch: string): string[] {
return keywordList.filter(keyword => {
return keywordList.filter((keyword) => {
if (partialMatch) {
return StringUtils.computeCompletionSimilarity(partialMatch, keyword) > similarityLimit;
} else {
@ -1034,19 +1034,19 @@ export class CompletionProvider {
private _addNamedParameters(signatureInfo: CallSignatureInfo, priorWord: string, completionList: CompletionList) {
const argNameMap = new Map<string, string>();
signatureInfo.signatures.forEach(signature => {
signatureInfo.signatures.forEach((signature) => {
this._addNamedParametersToMap(signature.type, argNameMap);
});
// Remove any named parameters that are already provided.
signatureInfo.callNode.arguments!.forEach(arg => {
signatureInfo.callNode.arguments!.forEach((arg) => {
if (arg.name) {
argNameMap.delete(arg.name.value);
}
});
// Add the remaining unique parameter names to the completion list.
argNameMap.forEach(argName => {
argNameMap.forEach((argName) => {
const similarity = StringUtils.computeCompletionSimilarity(priorWord, argName);
if (similarity > similarityLimit) {
@ -1056,7 +1056,7 @@ export class CompletionProvider {
const completionItemData: CompletionItemData = {
workspacePath: this._workspacePath,
filePath: this._filePath,
position: this._position
position: this._position,
};
completionItem.data = completionItemData;
completionItem.sortText = this._makeSortText(SortCategory.NamedParameter, argName);
@ -1067,7 +1067,7 @@ export class CompletionProvider {
}
private _addNamedParametersToMap(type: FunctionType, paramMap: Map<string, string>) {
type.details.parameters.forEach(param => {
type.details.parameters.forEach((param) => {
if (param.name && !param.isNameSynthesized) {
// Don't add private or protected names. These are assumed
// not to be named parameters.
@ -1160,7 +1160,7 @@ export class CompletionProvider {
case DeclarationType.Function:
if (type.category === TypeCategory.OverloadedFunction) {
typeDetail = type.overloads
.map(overload => name + this._evaluator.printType(overload))
.map((overload) => name + this._evaluator.printType(overload))
.join('\n');
} else {
typeDetail = name + ': ' + this._evaluator.printType(type);
@ -1208,7 +1208,7 @@ export class CompletionProvider {
if (markdownString) {
this._itemToResolve.documentation = {
kind: MarkupKind.Markdown,
value: markdownString
value: markdownString,
};
}
}
@ -1275,7 +1275,7 @@ export class CompletionProvider {
const completionItemData: CompletionItemData = {
workspacePath: this._workspacePath,
filePath: this._filePath,
position: this._position
position: this._position,
};
completionItem.data = completionItemData;
@ -1319,7 +1319,7 @@ export class CompletionProvider {
if (markdownString) {
completionItem.documentation = {
kind: MarkupKind.Markdown,
value: markdownString
value: markdownString,
};
}
@ -1328,13 +1328,13 @@ export class CompletionProvider {
}
if (additionalTextEdits) {
completionItem.additionalTextEdits = additionalTextEdits.map(te => {
completionItem.additionalTextEdits = additionalTextEdits.map((te) => {
const textEdit: TextEdit = {
range: {
start: { line: te.range.start.line, character: te.range.start.character },
end: { line: te.range.end.line, character: te.range.end.character }
end: { line: te.range.end.line, character: te.range.end.character },
},
newText: te.replacementText
newText: te.replacementText,
};
return textEdit;
});
@ -1346,7 +1346,7 @@ export class CompletionProvider {
private _getRecentListIndex(name: string, autoImportText: string) {
return CompletionProvider._mostRecentCompletions.findIndex(
item => item.label === name && item.autoImportText === autoImportText
(item) => item.label === name && item.autoImportText === autoImportText
);
}
@ -1429,8 +1429,8 @@ export class CompletionProvider {
const moduleDescriptor: ImportedModuleDescriptor = {
leadingDots: node.leadingDots,
hasTrailingDot: node.hasTrailingDot,
nameParts: node.nameParts.map(part => part.value),
importedSymbols: []
nameParts: node.nameParts.map((part) => part.value),
importedSymbols: [],
};
const completions = this._importResolver.getCompletionSuggestions(
@ -1457,7 +1457,7 @@ export class CompletionProvider {
completionItem.sortText = this._makeSortText(SortCategory.Keyword, keyword);
}
completions.forEach(completionName => {
completions.forEach((completionName) => {
const completionItem = CompletionItem.create(completionName);
completionItem.kind = CompletionItemKind.Module;
completionList.items.push(completionItem);


@ -44,12 +44,12 @@ export class DefinitionProvider {
if (node.nodeType === ParseNodeType.Name) {
const declarations = evaluator.getDeclarationsForNameNode(node);
if (declarations) {
declarations.forEach(decl => {
declarations.forEach((decl) => {
const resolvedDecl = evaluator.resolveAliasDeclaration(decl);
if (resolvedDecl && resolvedDecl.path) {
this._addIfUnique(definitions, {
path: resolvedDecl.path,
range: resolvedDecl.range
range: resolvedDecl.range,
});
}
});


@ -129,7 +129,7 @@ class FindSymbolTreeWalker extends ParseTreeWalker {
const location: Location = {
uri: URI.file(this._filePath).toString(),
range: declaration.range
range: declaration.range,
};
const symbolKind = getSymbolKind(name, declaration, this._evaluator);
@ -140,7 +140,7 @@ class FindSymbolTreeWalker extends ParseTreeWalker {
const symbolInfo: SymbolInformation = {
name,
kind: symbolKind,
location
location,
};
if (containerName) {
@ -267,7 +267,7 @@ function getDocumentSymbolRecursive(
kind: symbolKind,
range,
selectionRange,
children
children,
};
docSymbolResults.push(symbolInfo);


@ -57,8 +57,8 @@ export class HoverProvider {
parts: [],
range: {
start: convertOffsetToPosition(node.start, parseResults.tokenizerOutput.lines),
end: convertOffsetToPosition(TextRange.getEnd(node), parseResults.tokenizerOutput.lines)
}
end: convertOffsetToPosition(TextRange.getEnd(node), parseResults.tokenizerOutput.lines),
},
};
if (node.nodeType === ParseNodeType.Name) {
@ -212,7 +212,7 @@ export class HoverProvider {
} else if (type.category === TypeCategory.Function) {
this._addDocumentationResultsPart(parts, type.details.docString);
} else if (type.category === TypeCategory.OverloadedFunction) {
type.overloads.forEach(overload => {
type.overloads.forEach((overload) => {
this._addDocumentationResultsPart(parts, overload.details.docString);
});
}
@ -227,7 +227,7 @@ export class HoverProvider {
private static _addResultsPart(parts: HoverTextPart[], text: string, python = false) {
parts.push({
python,
text
text,
});
}
}
@ -238,7 +238,7 @@ export function convertHoverResults(hoverResults: HoverResults | undefined): Hov
}
const markupString = hoverResults.parts
.map(part => {
.map((part) => {
if (part.python) {
return '```python\n' + part.text + '\n```\n';
}
@ -249,8 +249,8 @@ export function convertHoverResults(hoverResults: HoverResults | undefined): Hov
return {
contents: {
kind: MarkupKind.Markdown,
value: markupString
value: markupString,
},
range: hoverResults.range
range: hoverResults.range,
};
}


@ -28,7 +28,7 @@ export const enum ImportGroup {
BuiltIn = 0,
ThirdParty = 1,
Local = 2,
LocalRelative = 3
LocalRelative = 3,
}
export class ImportSorter {
@ -41,7 +41,7 @@ export class ImportSorter {
const importStatements = ImportStatementUtils.getTopLevelImports(this._parseResults.parseTree);
const sortedStatements = importStatements.orderedImports
.map(s => s)
.map((s) => s)
.sort((a, b) => {
return this._compareImportStatements(a, b);
});
@ -55,7 +55,7 @@ export class ImportSorter {
actions.push({
range: primaryRange,
replacementText: this._generateSortedImportText(sortedStatements)
replacementText: this._generateSortedImportText(sortedStatements),
});
this._addSecondaryReplacementRanges(importStatements.orderedImports, actions);
@ -101,7 +101,7 @@ export class ImportSorter {
// If there are other blocks of import statements separated by other statements,
// we'll ignore these other blocks for now.
private _getPrimaryReplacementRange(statements: ImportStatementUtils.ImportStatement[]): Range {
let statementLimit = statements.findIndex(s => s.followsNonImportStatement);
let statementLimit = statements.findIndex((s) => s.followsNonImportStatement);
if (statementLimit < 0) {
statementLimit = statements.length;
}
@ -109,7 +109,7 @@ export class ImportSorter {
const lastStatement = statements[statementLimit - 1].node;
return {
start: convertOffsetToPosition(statements[0].node.start, this._parseResults.tokenizerOutput.lines),
end: convertOffsetToPosition(TextRange.getEnd(lastStatement), this._parseResults.tokenizerOutput.lines)
end: convertOffsetToPosition(TextRange.getEnd(lastStatement), this._parseResults.tokenizerOutput.lines),
};
}
@ -119,7 +119,7 @@ export class ImportSorter {
statements: ImportStatementUtils.ImportStatement[],
actions: TextEditAction[]
) {
let secondaryBlockStart = statements.findIndex(s => s.followsNonImportStatement);
let secondaryBlockStart = statements.findIndex((s) => s.followsNonImportStatement);
if (secondaryBlockStart < 0) {
return;
}
@ -141,9 +141,9 @@ export class ImportSorter {
end: convertOffsetToPosition(
TextRange.getEnd(statements[secondaryBlockLimit - 1].node),
this._parseResults.tokenizerOutput.lines
)
),
},
replacementText: ''
replacementText: '',
});
secondaryBlockStart = secondaryBlockLimit;
@ -195,7 +195,7 @@ export class ImportSorter {
private _formatImportFromNode(node: ImportFromNode, moduleName: string): string {
const symbols = node.imports
.sort((a, b) => this._compareSymbols(a, b))
.map(symbol => {
.map((symbol) => {
let symbolText = symbol.name.value;
if (symbol.alias) {
symbolText += ` as ${symbol.alias.value}`;


@ -64,16 +64,16 @@ function _addMissingOptionalToParam(
editActions.push({
range: { start: startPos, end: startPos },
replacementText: 'Optional['
replacementText: 'Optional[',
});
editActions.push({
range: { start: endPos, end: endPos },
replacementText: ']'
replacementText: ']',
});
// Add the import statement if necessary.
const importStatements = ImportStatementUtils.getTopLevelImports(parseResults.parseTree);
const importStatement = importStatements.orderedImports.find(imp => imp.moduleName === 'typing');
const importStatement = importStatements.orderedImports.find((imp) => imp.moduleName === 'typing');
// If there's an existing import statement, insert into it.
if (importStatement && importStatement.node.nodeType === ParseNodeType.ImportFrom) {


@ -72,7 +72,7 @@ class FindReferencesTreeWalker extends ParseTreeWalker {
if (declarations && declarations.length > 0) {
// Does this name share a declaration with the symbol of interest?
if (declarations.some(decl => this._resultsContainsDeclaration(decl))) {
if (declarations.some((decl) => this._resultsContainsDeclaration(decl))) {
// Is it the same symbol?
if (this._includeDeclaration || node !== this._referencesResult.nodeAtOffset) {
this._referencesResult.locations.push({
@ -82,8 +82,8 @@ class FindReferencesTreeWalker extends ParseTreeWalker {
end: convertOffsetToPosition(
TextRange.getEnd(node),
this._parseResults.tokenizerOutput.lines
)
}
),
},
});
}
}
@ -100,7 +100,7 @@ class FindReferencesTreeWalker extends ParseTreeWalker {
// The reference results declarations are already resolved, so we don't
// need to call resolveAliasDeclaration on them.
return this._referencesResult.declarations.some(decl =>
return this._referencesResult.declarations.some((decl) =>
DeclarationUtils.areDeclarationsSame(decl, resolvedDecl)
);
}
@ -137,7 +137,7 @@ export class ReferencesProvider {
}
const resolvedDeclarations: Declaration[] = [];
declarations.forEach(decl => {
declarations.forEach((decl) => {
const resolvedDecl = evaluator.resolveAliasDeclaration(decl);
if (resolvedDecl) {
resolvedDeclarations.push(resolvedDecl);
@ -151,7 +151,7 @@ export class ReferencesProvider {
// Does this symbol require search beyond the current file? Determine whether
// the symbol is declared within an evaluation scope that is within the current
// file and cannot be imported directly from other modules.
const requiresGlobalSearch = resolvedDeclarations.some(decl => {
const requiresGlobalSearch = resolvedDeclarations.some((decl) => {
// If the declaration is outside of this file, a global search is needed.
if (decl.path !== filePath) {
return true;
@ -172,7 +172,7 @@ export class ReferencesProvider {
requiresGlobalSearch,
nodeAtOffset: node,
declarations: resolvedDeclarations,
locations: []
locations: [],
};
const refTreeWalker = new FindReferencesTreeWalker(


@ -104,7 +104,7 @@ export class SignatureHelpProvider {
return {
signatures,
activeSignature,
activeParameter
activeParameter,
};
}
@ -119,7 +119,7 @@ export class SignatureHelpProvider {
parameters.push({
startOffset: label.length,
endOffset: label.length + paramString.length,
documentation: extractParameterDocumentation(functionDocString || '', paramName)
documentation: extractParameterDocumentation(functionDocString || '', paramName),
});
label += paramString;
@ -133,7 +133,7 @@ export class SignatureHelpProvider {
const sigInfo: SignatureInfo = {
label,
parameters,
documentation: functionDocString
documentation: functionDocString,
};
return sigInfo;


@ -22,7 +22,7 @@ enum CharCategory {
StartIdentifierChar = 1,
// Character can appear only within identifier, not at beginning
IdentifierChar = 2
IdentifierChar = 2,
}
// Table of first 256 character codes (the most common cases).
@ -100,7 +100,7 @@ const _specialStartIdentifierChars: unicode.UnicodeRangeTable = [
0x2118,
0x212e,
0x309b,
0x309c
0x309c,
];
const _startIdentifierCharRanges = [
@ -110,7 +110,7 @@ const _startIdentifierCharRanges = [
unicode.unicodeLt,
unicode.unicodeLo,
unicode.unicodeLm,
unicode.unicodeNl
unicode.unicodeNl,
];
// Characters with the Other_ID_Start property.
@ -126,7 +126,7 @@ const _specialIdentifierChars: unicode.UnicodeRangeTable = [
0x136f,
0x1370,
0x1371,
0x19da
0x19da,
];
const _identifierCharRanges = [
@ -134,7 +134,7 @@ const _identifierCharRanges = [
unicode.unicodeMn,
unicode.unicodeMc,
unicode.unicodeNd,
unicode.unicodePc
unicode.unicodePc,
];
function _buildIdentifierLookupTableFromUnicodeRangeTable(
@ -172,11 +172,11 @@ function _buildIdentifierLookupTableFromUnicodeRangeTable(
function _buildIdentifierLookupTable(fastTableOnly: boolean) {
_identifierCharFastTable.fill(CharCategory.NotIdentifierChar);
_identifierCharRanges.forEach(table => {
_identifierCharRanges.forEach((table) => {
_buildIdentifierLookupTableFromUnicodeRangeTable(table, CharCategory.IdentifierChar, fastTableOnly);
});
_startIdentifierCharRanges.forEach(table => {
_startIdentifierCharRanges.forEach((table) => {
_buildIdentifierLookupTableFromUnicodeRangeTable(table, CharCategory.StartIdentifierChar, fastTableOnly);
});
}


@ -17,7 +17,7 @@ import {
OperatorType,
StringToken,
Token,
TokenType
TokenType,
} from './tokenizerTypes';
export const enum ParseNodeType {
@ -90,7 +90,7 @@ export const enum ParseNodeType {
WithItem, // 60
Yield,
YieldFrom
YieldFrom,
}
export const enum ErrorExpressionCategory {
@ -104,7 +104,7 @@ export const enum ErrorExpressionCategory {
MissingMemberAccessName,
MissingTupleCloseParen,
MissingListCloseBracket,
MissingFunctionParameterList
MissingFunctionParameterList,
}
export interface ParseNodeBase extends TextRange {
@ -146,7 +146,7 @@ export namespace ModuleNode {
length: range.length,
nodeType: ParseNodeType.Module,
id: _nextNodeId++,
statements: []
statements: [],
};
return node;
@ -165,7 +165,7 @@ export namespace SuiteNode {
length: range.length,
nodeType: ParseNodeType.Suite,
id: _nextNodeId++,
statements: []
statements: [],
};
return node;
@ -193,7 +193,7 @@ export namespace IfNode {
id: _nextNodeId++,
testExpression,
ifSuite,
elseSuite
elseSuite,
};
testExpression.parent = node;
@ -225,7 +225,7 @@ export namespace WhileNode {
nodeType: ParseNodeType.While,
id: _nextNodeId++,
testExpression,
whileSuite
whileSuite,
};
testExpression.parent = node;
@ -260,7 +260,7 @@ export namespace ForNode {
id: _nextNodeId++,
targetExpression,
iterableExpression,
forSuite
forSuite,
};
targetExpression.parent = node;
@ -290,7 +290,7 @@ export namespace ListComprehensionForNode {
nodeType: ParseNodeType.ListComprehensionFor,
id: _nextNodeId++,
targetExpression,
iterableExpression
iterableExpression,
};
targetExpression.parent = node;
@ -315,7 +315,7 @@ export namespace ListComprehensionIfNode {
length: ifToken.length,
nodeType: ParseNodeType.ListComprehensionIf,
id: _nextNodeId++,
testExpression
testExpression,
};
testExpression.parent = node;
@ -342,7 +342,7 @@ export namespace TryNode {
nodeType: ParseNodeType.Try,
id: _nextNodeId++,
trySuite,
exceptClauses: []
exceptClauses: [],
};
trySuite.parent = node;
@ -367,7 +367,7 @@ export namespace ExceptNode {
length: exceptToken.length,
nodeType: ParseNodeType.Except,
id: _nextNodeId++,
exceptSuite
exceptSuite,
};
exceptSuite.parent = node;
@ -398,7 +398,7 @@ export namespace FunctionNode {
decorators: [],
name,
parameters: [],
suite
suite,
};
name.parent = node;
@ -413,7 +413,7 @@ export namespace FunctionNode {
export const enum ParameterCategory {
Simple,
VarArgList,
VarArgDictionary
VarArgDictionary,
}
export interface ParameterNode extends ParseNodeBase {
@ -431,7 +431,7 @@ export namespace ParameterNode {
length: startToken.length,
nodeType: ParseNodeType.Parameter,
id: _nextNodeId++,
category: paramCategory
category: paramCategory,
};
return node;
@ -456,7 +456,7 @@ export namespace ClassNode {
decorators: [],
name,
arguments: [],
suite
suite,
};
name.parent = node;
@ -487,9 +487,9 @@ export namespace ClassNode {
start: 0,
length: 0,
comments: [],
value: ''
value: '',
},
value: ''
value: '',
},
arguments: [],
suite: {
@ -497,11 +497,11 @@ export namespace ClassNode {
length: 0,
id: 0,
nodeType: ParseNodeType.Suite,
statements: []
}
statements: [],
},
};
decorators.forEach(decorator => {
decorators.forEach((decorator) => {
decorator.parent = node;
extendRange(node, decorator);
});
@ -525,7 +525,7 @@ export namespace WithNode {
nodeType: ParseNodeType.With,
id: _nextNodeId++,
withItems: [],
suite
suite,
};
suite.parent = node;
@ -549,7 +549,7 @@ export namespace WithItemNode {
length: expression.length,
nodeType: ParseNodeType.WithItem,
id: _nextNodeId++,
expression
expression,
};
expression.parent = node;
@ -572,7 +572,7 @@ export namespace DecoratorNode {
nodeType: ParseNodeType.Decorator,
id: _nextNodeId++,
leftExpression,
arguments: undefined
arguments: undefined,
};
leftExpression.parent = node;
@ -595,7 +595,7 @@ export namespace StatementListNode {
length: atToken.length,
nodeType: ParseNodeType.StatementList,
id: _nextNodeId++,
statements: []
statements: [],
};
return node;
@ -705,7 +705,7 @@ export namespace ErrorNode {
nodeType: ParseNodeType.Error,
id: _nextNodeId++,
category,
child
child,
};
if (child) {
@ -731,7 +731,7 @@ export namespace UnaryOperationNode {
nodeType: ParseNodeType.UnaryOperation,
id: _nextNodeId++,
operator,
expression
expression,
};
expression.parent = node;
@ -758,7 +758,7 @@ export namespace BinaryOperationNode {
id: _nextNodeId++,
leftExpression,
operator,
rightExpression
rightExpression,
};
leftExpression.parent = node;
@ -784,7 +784,7 @@ export namespace AssignmentExpressionNode {
nodeType: ParseNodeType.AssignmentExpression,
id: _nextNodeId++,
name,
rightExpression
rightExpression,
};
name.parent = node;
@ -811,7 +811,7 @@ export namespace AssignmentNode {
nodeType: ParseNodeType.Assignment,
id: _nextNodeId++,
leftExpression,
rightExpression
rightExpression,
};
leftExpression.parent = node;
@ -837,7 +837,7 @@ export namespace TypeAnnotationNode {
nodeType: ParseNodeType.TypeAnnotation,
id: _nextNodeId++,
valueExpression,
typeAnnotation
typeAnnotation,
};
valueExpression.parent = node;
@ -876,7 +876,7 @@ export namespace AugmentedAssignmentNode {
leftExpression,
operator,
rightExpression,
destExpression
destExpression,
};
leftExpression.parent = node;
@ -901,7 +901,7 @@ export namespace AwaitNode {
length: awaitToken.length,
nodeType: ParseNodeType.Await,
id: _nextNodeId++,
expression
expression,
};
expression.parent = node;
@ -932,7 +932,7 @@ export namespace TernaryNode {
id: _nextNodeId++,
ifExpression,
testExpression,
elseExpression
elseExpression,
};
ifExpression.parent = node;
@ -957,7 +957,7 @@ export namespace UnpackNode {
length: starToken.length,
nodeType: ParseNodeType.Unpack,
id: _nextNodeId++,
expression
expression,
};
expression.parent = node;
@ -980,7 +980,7 @@ export namespace TupleNode {
length: range.length,
nodeType: ParseNodeType.Tuple,
id: _nextNodeId++,
expressions: []
expressions: [],
};
return node;
@ -1001,7 +1001,7 @@ export namespace CallNode {
nodeType: ParseNodeType.Call,
id: _nextNodeId++,
leftExpression,
arguments: []
arguments: [],
};
leftExpression.parent = node;
@ -1024,7 +1024,7 @@ export namespace ListComprehensionNode {
nodeType: ParseNodeType.ListComprehension,
id: _nextNodeId++,
expression,
comprehensions: []
comprehensions: [],
};
expression.parent = node;
@ -1045,10 +1045,10 @@ export namespace IndexItemsNode {
length: openBracketToken.length,
nodeType: ParseNodeType.IndexItems,
id: _nextNodeId++,
items
items,
};
items.forEach(item => {
items.forEach((item) => {
item.parent = node;
});
@ -1072,7 +1072,7 @@ export namespace IndexNode {
nodeType: ParseNodeType.Index,
id: _nextNodeId++,
baseExpression,
items
items,
};
baseExpression.parent = node;
@ -1097,7 +1097,7 @@ export namespace SliceNode {
start: range.start,
length: range.length,
nodeType: ParseNodeType.Slice,
id: _nextNodeId++
id: _nextNodeId++,
};
return node;
@ -1116,7 +1116,7 @@ export namespace YieldNode {
length: yieldToken.length,
nodeType: ParseNodeType.Yield,
id: _nextNodeId++,
expression
expression,
};
if (expression) {
@ -1140,7 +1140,7 @@ export namespace YieldFromNode {
length: yieldToken.length,
nodeType: ParseNodeType.YieldFrom,
id: _nextNodeId++,
expression
expression,
};
expression.parent = node;
@ -1165,7 +1165,7 @@ export namespace MemberAccessNode {
nodeType: ParseNodeType.MemberAccess,
id: _nextNodeId++,
leftExpression,
memberName
memberName,
};
leftExpression.parent = node;
@ -1191,7 +1191,7 @@ export namespace LambdaNode {
nodeType: ParseNodeType.Lambda,
id: _nextNodeId++,
parameters: [],
expression
expression,
};
expression.parent = node;
@ -1216,7 +1216,7 @@ export namespace NameNode {
nodeType: ParseNodeType.Name,
id: _nextNodeId++,
token: nameToken,
value: nameToken.value
value: nameToken.value,
};
return node;
@ -1235,7 +1235,7 @@ export namespace ConstantNode {
length: token.length,
nodeType: ParseNodeType.Constant,
id: _nextNodeId++,
constType: token.keywordType
constType: token.keywordType,
};
return node;
@ -1252,7 +1252,7 @@ export namespace EllipsisNode {
start: range.start,
length: range.length,
nodeType: ParseNodeType.Ellipsis,
id: _nextNodeId++
id: _nextNodeId++,
};
return node;
@ -1275,7 +1275,7 @@ export namespace NumberNode {
id: _nextNodeId++,
value: token.value,
isInteger: token.isInteger,
isImaginary: token.isImaginary
isImaginary: token.isImaginary,
};
return node;
@ -1298,7 +1298,7 @@ export namespace StringNode {
id: _nextNodeId++,
token,
value: unescapedValue,
hasUnescapeErrors
hasUnescapeErrors,
};
return node;
@ -1328,10 +1328,10 @@ export namespace FormatStringNode {
token,
value: unescapedValue,
hasUnescapeErrors,
expressions
expressions,
};
expressions.forEach(expr => {
expressions.forEach((expr) => {
expr.parent = node;
});
@ -1356,11 +1356,11 @@ export namespace StringListNode {
length: strings[0].length,
nodeType: ParseNodeType.StringList,
id: _nextNodeId++,
strings
strings,
};
if (strings.length > 0) {
strings.forEach(str => {
strings.forEach((str) => {
str.parent = node;
});
extendRange(node, strings[strings.length - 1]);
@ -1382,7 +1382,7 @@ export namespace DictionaryNode {
length: range.length,
nodeType: ParseNodeType.Dictionary,
id: _nextNodeId++,
entries: []
entries: [],
};
return node;
@ -1403,7 +1403,7 @@ export namespace DictionaryKeyEntryNode {
nodeType: ParseNodeType.DictionaryKeyEntry,
id: _nextNodeId++,
keyExpression,
valueExpression
valueExpression,
};
keyExpression.parent = node;
@ -1427,7 +1427,7 @@ export namespace DictionaryExpandEntryNode {
length: expandExpression.length,
nodeType: ParseNodeType.DictionaryExpandEntry,
id: _nextNodeId++,
expandExpression
expandExpression,
};
expandExpression.parent = node;
@ -1450,7 +1450,7 @@ export namespace SetNode {
length: range.length,
nodeType: ParseNodeType.Set,
id: _nextNodeId++,
entries: []
entries: [],
};
return node;
@ -1469,7 +1469,7 @@ export namespace ListNode {
length: range.length,
nodeType: ParseNodeType.List,
id: _nextNodeId++,
entries: []
entries: [],
};
return node;
@ -1479,7 +1479,7 @@ export namespace ListNode {
export const enum ArgumentCategory {
Simple,
UnpackedList,
UnpackedDictionary
UnpackedDictionary,
}
export interface ArgumentNode extends ParseNodeBase {
@ -1497,7 +1497,7 @@ export namespace ArgumentNode {
nodeType: ParseNodeType.Argument,
id: _nextNodeId++,
valueExpression,
argumentCategory: argCategory
argumentCategory: argCategory,
};
valueExpression.parent = node;
@ -1520,7 +1520,7 @@ export namespace DelNode {
length: delToken.length,
nodeType: ParseNodeType.Del,
id: _nextNodeId++,
expressions: []
expressions: [],
};
return node;
@ -1537,7 +1537,7 @@ export namespace PassNode {
start: passToken.start,
length: passToken.length,
nodeType: ParseNodeType.Pass,
id: _nextNodeId++
id: _nextNodeId++,
};
return node;
@ -1556,7 +1556,7 @@ export namespace ImportNode {
length: passToken.length,
nodeType: ParseNodeType.Import,
id: _nextNodeId++,
list: []
list: [],
};
return node;
@ -1580,7 +1580,7 @@ export namespace ModuleNameNode {
nodeType: ParseNodeType.ModuleName,
id: _nextNodeId++,
leadingDots: 0,
nameParts: []
nameParts: [],
};
return node;
@ -1600,7 +1600,7 @@ export namespace ImportAsNode {
length: module.length,
nodeType: ParseNodeType.ImportAs,
id: _nextNodeId++,
module
module,
};
module.parent = node;
@ -1628,7 +1628,7 @@ export namespace ImportFromNode {
module,
imports: [],
isWildcardImport: false,
usesParens: false
usesParens: false,
};
module.parent = node;
@ -1652,7 +1652,7 @@ export namespace ImportFromAsNode {
length: name.length,
nodeType: ParseNodeType.ImportFromAs,
id: _nextNodeId++,
name
name,
};
name.parent = node;
@ -1673,7 +1673,7 @@ export namespace GlobalNode {
length: range.length,
nodeType: ParseNodeType.Global,
id: _nextNodeId++,
nameList: []
nameList: [],
};
return node;
@ -1692,7 +1692,7 @@ export namespace NonlocalNode {
length: range.length,
nodeType: ParseNodeType.Nonlocal,
id: _nextNodeId++,
nameList: []
nameList: [],
};
return node;
@ -1712,7 +1712,7 @@ export namespace AssertNode {
length: assertToken.length,
nodeType: ParseNodeType.Assert,
id: _nextNodeId++,
testExpression
testExpression,
};
testExpression.parent = node;
@ -1733,7 +1733,7 @@ export namespace BreakNode {
start: range.start,
length: range.length,
nodeType: ParseNodeType.Break,
id: _nextNodeId++
id: _nextNodeId++,
};
return node;
@ -1750,7 +1750,7 @@ export namespace ContinueNode {
start: range.start,
length: range.length,
nodeType: ParseNodeType.Continue,
id: _nextNodeId++
id: _nextNodeId++,
};
return node;
@ -1768,7 +1768,7 @@ export namespace ReturnNode {
start: range.start,
length: range.length,
nodeType: ParseNodeType.Return,
id: _nextNodeId++
id: _nextNodeId++,
};
return node;
@ -1788,7 +1788,7 @@ export namespace RaiseNode {
start: range.start,
length: range.length,
nodeType: ParseNodeType.Raise,
id: _nextNodeId++
id: _nextNodeId++,
};
return node;


@ -93,7 +93,7 @@ import {
WithItemNode,
WithNode,
YieldFromNode,
YieldNode
YieldNode,
} from './parseNodes';
import * as StringTokenUtils from './stringTokenUtils';
import { Tokenizer, TokenizerOutput } from './tokenizer';
@ -109,7 +109,7 @@ import {
StringToken,
StringTokenFlags,
Token,
TokenType
TokenType,
} from './tokenizerTypes';
interface ExpressionListResult {
@ -210,7 +210,7 @@ export class Parser {
importedModules: this._importedModules,
futureImports: this._futureImportMap,
tokenizerOutput: this._tokenizerOutput!,
containsWildcardImport: this._containsWildcardImport
containsWildcardImport: this._containsWildcardImport,
};
}
@ -244,7 +244,7 @@ export class Parser {
return {
parseTree,
lines: this._tokenizerOutput!.lines,
diagnostics: diagSink.fetchAndClear()
diagnostics: diagSink.fetchAndClear(),
};
}
@ -496,7 +496,7 @@ export class Parser {
listCompNode.comprehensions = compList;
if (compList.length > 0) {
compList.forEach(comp => {
compList.forEach((comp) => {
comp.parent = listCompNode;
});
extendRange(listCompNode, compList[compList.length - 1]);
@ -704,13 +704,13 @@ export class Parser {
}
functionNode.parameters = paramList;
paramList.forEach(param => {
paramList.forEach((param) => {
param.parent = functionNode;
});
if (decorators) {
functionNode.decorators = decorators;
decorators.forEach(decorator => {
decorators.forEach((decorator) => {
decorator.parent = functionNode;
});
@ -922,7 +922,7 @@ export class Parser {
}
withNode.withItems = withItemList;
withItemList.forEach(withItem => {
withItemList.forEach((withItem) => {
withItem.parent = withNode;
});
@ -1022,7 +1022,7 @@ export class Parser {
if (this._consumeTokenIfType(TokenType.OpenParenthesis)) {
decoratorNode.arguments = this._parseArgList();
decoratorNode.arguments.forEach(arg => {
decoratorNode.arguments.forEach((arg) => {
arg.parent = decoratorNode;
extendRange(decoratorNode, arg);
});
@ -1066,14 +1066,14 @@ export class Parser {
const classNode = ClassNode.create(classToken, NameNode.create(nameToken), suite);
classNode.arguments = argList;
argList.forEach(arg => {
argList.forEach((arg) => {
arg.parent = classNode;
});
if (decorators) {
classNode.decorators = decorators;
if (decorators.length > 0) {
decorators.forEach(decorator => {
decorators.forEach((decorator) => {
decorator.parent = classNode;
});
extendRange(classNode, decorators[0]);
@ -1216,8 +1216,8 @@ export class Parser {
this._importedModules.push({
nameNode: importFromNode.module,
leadingDots: importFromNode.module.leadingDots,
nameParts: importFromNode.module.nameParts.map(p => p.value),
importedSymbols: importFromNode.imports.map(imp => imp.name.value)
nameParts: importFromNode.module.nameParts.map((p) => p.value),
importedSymbols: importFromNode.imports.map((imp) => imp.name.value),
});
return importFromNode;
@ -1252,8 +1252,8 @@ export class Parser {
this._importedModules.push({
nameNode: importAsNode.module,
leadingDots: importAsNode.module.leadingDots,
nameParts: importAsNode.module.nameParts.map(p => p.value),
importedSymbols: undefined
nameParts: importAsNode.module.nameParts.map((p) => p.value),
importedSymbols: undefined,
});
if (!this._consumeTokenIfType(TokenType.Comma)) {
@ -1316,7 +1316,7 @@ export class Parser {
const globalNode = GlobalNode.create(globalToken);
globalNode.nameList = this._parseNameList();
if (globalNode.nameList.length > 0) {
globalNode.nameList.forEach(name => {
globalNode.nameList.forEach((name) => {
name.parent = globalNode;
});
extendRange(globalNode, globalNode.nameList[globalNode.nameList.length - 1]);
@ -1330,7 +1330,7 @@ export class Parser {
const nonlocalNode = NonlocalNode.create(nonlocalToken);
nonlocalNode.nameList = this._parseNameList();
if (nonlocalNode.nameList.length > 0) {
nonlocalNode.nameList.forEach(name => {
nonlocalNode.nameList.forEach((name) => {
name.parent = nonlocalNode;
});
extendRange(nonlocalNode, nonlocalNode.nameList[nonlocalNode.nameList.length - 1]);
@ -1420,7 +1420,7 @@ export class Parser {
const delNode = DelNode.create(delToken);
delNode.expressions = exprListResult.list;
if (delNode.expressions.length > 0) {
delNode.expressions.forEach(expr => {
delNode.expressions.forEach((expr) => {
expr.parent = delNode;
});
extendRange(delNode, delNode.expressions[delNode.expressions.length - 1]);
@ -1570,7 +1570,7 @@ export class Parser {
const tupleNode = TupleNode.create(tupleStartRange);
tupleNode.expressions = exprListResult.list;
if (exprListResult.list.length > 0) {
exprListResult.list.forEach(expr => {
exprListResult.list.forEach((expr) => {
expr.parent = tupleNode;
});
extendRange(tupleNode, exprListResult.list[exprListResult.list.length - 1]);
@ -1975,7 +1975,7 @@ export class Parser {
const callNode = CallNode.create(atomExpression);
callNode.arguments = argList;
if (argList.length > 0) {
argList.forEach(arg => {
argList.forEach((arg) => {
arg.parent = callNode;
});
extendRange(callNode, argList[argList.length - 1]);
@ -2089,7 +2089,7 @@ export class Parser {
this._handleExpressionParseError(
ErrorExpressionCategory.MissingIndexOrSlice,
'Expected index or slice expression'
)
),
];
}
@ -2361,7 +2361,7 @@ export class Parser {
const lambdaNode = LambdaNode.create(lambdaToken, testExpr);
lambdaNode.parameters = argList;
argList.forEach(arg => {
argList.forEach((arg) => {
arg.parent = lambdaNode;
});
return lambdaNode;
@ -2419,7 +2419,7 @@ export class Parser {
const listAtom = ListNode.create(startBracket);
extendRange(listAtom, closeBracket);
if (exprListResult.list.length > 0) {
exprListResult.list.forEach(expr => {
exprListResult.list.forEach((expr) => {
expr.parent = listAtom;
});
extendRange(listAtom, exprListResult.list[exprListResult.list.length - 1]);
@ -2555,7 +2555,7 @@ export class Parser {
if (setEntries.length > 0) {
extendRange(setAtom, setEntries[setEntries.length - 1]);
}
setEntries.forEach(entry => {
setEntries.forEach((entry) => {
entry.parent = setAtom;
});
setAtom.entries = setEntries;
@ -2567,7 +2567,7 @@ export class Parser {
extendRange(dictionaryAtom, closeCurlyBrace);
}
if (dictionaryEntries.length > 0) {
dictionaryEntries.forEach(entry => {
dictionaryEntries.forEach((entry) => {
entry.parent = dictionaryAtom;
});
extendRange(dictionaryAtom, dictionaryEntries[dictionaryEntries.length - 1]);
@ -2802,7 +2802,7 @@ export class Parser {
true
);
parseResults.diagnostics.forEach(diag => {
parseResults.diagnostics.forEach((diag) => {
this._addError(diag.message, stringListNode);
});
@ -2835,7 +2835,7 @@ export class Parser {
false
);
parseResults.diagnostics.forEach(diag => {
parseResults.diagnostics.forEach((diag) => {
const textRangeStart =
(diag.range
? convertPositionToOffset(diag.range.start, parseResults.lines)
@ -2992,7 +2992,7 @@ export class Parser {
true
);
parseResults.diagnostics.forEach(diag => {
parseResults.diagnostics.forEach((diag) => {
this._addError(diag.message, stringNode);
});
@ -3152,7 +3152,7 @@ export class Parser {
// to an identifier token.
if (nextToken.type === TokenType.Keyword) {
const keywordType = this._peekKeywordType();
if (!disallowedKeywords.find(type => type === keywordType)) {
if (!disallowedKeywords.find((type) => type === keywordType)) {
const keywordText = this._fileContents!.substr(nextToken.start, nextToken.length);
this._getNextToken();
return IdentifierToken.create(nextToken.start, nextToken.length, keywordText, nextToken.comments);


@ -33,7 +33,7 @@ export const enum UnescapeErrorType {
InvalidEscapeSequence,
EscapeWithinFormatExpression,
SingleCloseBraceWithinFormatLiteral,
UnterminatedFormatExpression
UnterminatedFormatExpression,
}
export interface UnescapeError {
@ -65,14 +65,14 @@ export function getUnescapedString(stringToken: StringToken): UnescapedString {
offset: 0,
length: 0,
value: '',
isExpression: false
isExpression: false,
};
let strOffset = 0;
const output: UnescapedString = {
value: '',
unescapeErrors: [],
nonAsciiInBytes: false,
formatStringSegments: []
formatStringSegments: [],
};
const addInvalidEscapeOffset = () => {
@ -81,7 +81,7 @@ export function getUnescapedString(stringToken: StringToken): UnescapedString {
output.unescapeErrors.push({
offset: strOffset - 1,
length: 2,
errorType: UnescapeErrorType.InvalidEscapeSequence
errorType: UnescapeErrorType.InvalidEscapeSequence,
});
}
};
@ -135,7 +135,7 @@ export function getUnescapedString(stringToken: StringToken): UnescapedString {
output.unescapeErrors.push({
offset: formatSegment.offset,
length: strOffset - formatSegment.offset,
errorType: UnescapeErrorType.UnterminatedFormatExpression
errorType: UnescapeErrorType.UnterminatedFormatExpression,
});
}
@ -154,7 +154,7 @@ export function getUnescapedString(stringToken: StringToken): UnescapedString {
output.unescapeErrors.push({
offset: strOffset,
length: 1,
errorType: UnescapeErrorType.EscapeWithinFormatExpression
errorType: UnescapeErrorType.EscapeWithinFormatExpression,
});
}
@ -326,7 +326,7 @@ export function getUnescapedString(stringToken: StringToken): UnescapedString {
offset: strOffset,
length: 0,
value: '',
isExpression: true
isExpression: true,
};
formatExpressionNestCount++;
}
@ -338,7 +338,7 @@ export function getUnescapedString(stringToken: StringToken): UnescapedString {
output.unescapeErrors.push({
offset: strOffset,
length: 1,
errorType: UnescapeErrorType.SingleCloseBraceWithinFormatLiteral
errorType: UnescapeErrorType.SingleCloseBraceWithinFormatLiteral,
});
strOffset++;
} else {
@ -355,7 +355,7 @@ export function getUnescapedString(stringToken: StringToken): UnescapedString {
offset: strOffset,
length: 0,
value: '',
isExpression: false
isExpression: false,
};
}
} else if (formatSegment.isExpression && (curChar === Char.SingleQuote || curChar === Char.DoubleQuote)) {


@ -32,7 +32,7 @@ import {
StringToken,
StringTokenFlags,
Token,
TokenType
TokenType,
} from './tokenizerTypes';
const _keywords: { [key: string]: KeywordType } = {
@ -71,7 +71,7 @@ const _keywords: { [key: string]: KeywordType } = {
yield: KeywordType.Yield,
False: KeywordType.False,
None: KeywordType.None,
True: KeywordType.True
True: KeywordType.True,
};
const _operatorInfo: { [key: number]: OperatorFlags } = {
@ -116,7 +116,7 @@ const _operatorInfo: { [key: number]: OperatorFlags } = {
[OperatorType.Is]: OperatorFlags.Binary,
[OperatorType.IsNot]: OperatorFlags.Binary,
[OperatorType.In]: OperatorFlags.Binary,
[OperatorType.NotIn]: OperatorFlags.Binary
[OperatorType.NotIn]: OperatorFlags.Binary,
};
const _byteOrderMarker = 0xfeff;
@ -270,7 +270,7 @@ export class Tokenizer {
typeIgnoreAll: this._typeIgnoreAll,
predominantEndOfLineSequence,
predominantTabSequence,
predominantSingleQuoteCharacter: this._singleQuoteCount >= this._doubleQuoteCount ? "'" : '"'
predominantSingleQuoteCharacter: this._singleQuoteCount >= this._doubleQuoteCount ? "'" : '"',
};
}
@ -563,7 +563,7 @@ export class Tokenizer {
tab1Spaces,
tab8Spaces,
isSpacePresent,
isTabPresent
isTabPresent,
});
this._tokens.push(IndentToken.create(this._cs.position, 0, tab8Spaces, false, this._getComments()));
}
@ -588,7 +588,7 @@ export class Tokenizer {
tab1Spaces,
tab8Spaces,
isSpacePresent,
isTabPresent
isTabPresent,
});
this._tokens.push(
@ -924,7 +924,7 @@ export class Tokenizer {
const comment = Comment.create(start, length, value);
if (value.match(/^\s*type:\s*ignore(\s|$)/)) {
if (this._tokens.findIndex(t => t.type !== TokenType.NewLine && t && t.type !== TokenType.Indent) < 0) {
if (this._tokens.findIndex((t) => t.type !== TokenType.NewLine && t && t.type !== TokenType.Indent) < 0) {
this._typeIgnoreAll = true;
} else {
this._typeIgnoreLines[this._lineRanges.length] = true;
@ -962,10 +962,7 @@ export class Tokenizer {
}
if (this._cs.lookAhead(2) === Char.SingleQuote || this._cs.lookAhead(2) === Char.DoubleQuote) {
const prefix = this._cs
.getText()
.substr(this._cs.position, 2)
.toLowerCase();
const prefix = this._cs.getText().substr(this._cs.position, 2).toLowerCase();
switch (prefix) {
case 'rf':
case 'fr':


@ -36,14 +36,14 @@ export const enum TokenType {
Ellipsis,
Dot,
Arrow,
Backtick
Backtick,
}
export const enum NewLineType {
CarriageReturn,
LineFeed,
CarriageReturnLineFeed,
Implied
Implied,
}
export const enum OperatorType {
@ -93,14 +93,14 @@ export const enum OperatorType {
Is,
IsNot,
In,
NotIn
NotIn,
}
export const enum OperatorFlags {
Unary = 1 << 0,
Binary = 1 << 1,
Assignment = 1 << 2,
Comparison = 1 << 3
Comparison = 1 << 3,
}
export const enum KeywordType {
@ -139,7 +139,7 @@ export const enum KeywordType {
Try,
While,
With,
Yield
Yield,
}
export const enum StringTokenFlags {
@ -157,7 +157,7 @@ export const enum StringTokenFlags {
Format = 1 << 6,
// Error conditions
Unterminated = 1 << 16
Unterminated = 1 << 16,
}
export interface Comment extends TextRange {
@ -171,7 +171,7 @@ export namespace Comment {
const comment: Comment = {
start,
length,
value
value,
};
return comment;
@ -193,7 +193,7 @@ export namespace Token {
start,
length,
type,
comments
comments,
};
return token;
@ -220,7 +220,7 @@ export namespace IndentToken {
type: TokenType.Indent,
isIndentAmbiguous,
comments,
indentAmount
indentAmount,
};
return token;
@ -247,7 +247,7 @@ export namespace DedentToken {
type: TokenType.Dedent,
comments,
indentAmount,
matchesIndent
matchesIndent,
};
return token;
@ -266,7 +266,7 @@ export namespace NewLineToken {
length,
type: TokenType.NewLine,
comments,
newLineType
newLineType,
};
return token;
@ -285,7 +285,7 @@ export namespace KeywordToken {
length,
type: TokenType.Keyword,
comments,
keywordType
keywordType,
};
return token;
@ -325,7 +325,7 @@ export namespace StringToken {
escapedValue,
prefixLength,
quoteMarkLength: flags & StringTokenFlags.Triplicate ? 3 : 1,
comments
comments,
};
return token;
@ -355,7 +355,7 @@ export namespace NumberToken {
isInteger,
isImaginary,
value,
comments
comments,
};
return token;
@ -374,7 +374,7 @@ export namespace OperatorToken {
length,
type: TokenType.Operator,
operatorType,
comments
comments,
};
return token;
@ -393,7 +393,7 @@ export namespace IdentifierToken {
length,
type: TokenType.Identifier,
value,
comments
comments,
};
return token;


@ -639,7 +639,7 @@ export const unicodeLu: UnicodeRangeTable = [
[120662, 120686],
[120720, 120744],
120778,
[125184, 125217]
[125184, 125217],
];
export const unicodeLl: UnicodeRangeTable = [
@ -1275,7 +1275,7 @@ export const unicodeLl: UnicodeRangeTable = [
[120746, 120770],
[120772, 120777],
120779,
[125218, 125251]
[125218, 125251],
];
export const unicodeLt: UnicodeRangeTable = [
@ -1288,7 +1288,7 @@ export const unicodeLt: UnicodeRangeTable = [
[8104, 8111],
8124,
8140,
8188
8188,
];
export const unicodeLo: UnicodeRangeTable = [
@ -1750,7 +1750,7 @@ export const unicodeLo: UnicodeRangeTable = [
[177984, 178205],
[178208, 183969],
[183984, 191456],
[194560, 195101]
[194560, 195101],
];
export const unicodeLm: UnicodeRangeTable = [
@ -1810,7 +1810,7 @@ export const unicodeLm: UnicodeRangeTable = [
[65438, 65439],
[92992, 92995],
[94099, 94111],
[94176, 94177]
[94176, 94177],
];
export const unicodeNl: UnicodeRangeTable = [
@ -1825,7 +1825,7 @@ export const unicodeNl: UnicodeRangeTable = [
66369,
66378,
[66513, 66517],
[74752, 74862]
[74752, 74862],
];
export const unicodeMn: UnicodeRangeTable = [
@ -2129,7 +2129,7 @@ export const unicodeMn: UnicodeRangeTable = [
[122918, 122922],
[125136, 125142],
[125252, 125258],
[917760, 917999]
[917760, 917999],
];
export const unicodeMc: UnicodeRangeTable = [
@ -2292,7 +2292,7 @@ export const unicodeMc: UnicodeRangeTable = [
72884,
[94033, 94078],
[119141, 119142],
[119149, 119154]
[119149, 119154],
];
export const unicodeNd: UnicodeRangeTable = [
@ -2350,7 +2350,7 @@ export const unicodeNd: UnicodeRangeTable = [
[92768, 92777],
[93008, 93017],
[120782, 120831],
[125264, 125273]
[125264, 125273],
];
export const unicodePc: UnicodeRangeTable = [95, [8255, 8256], 8276, [65075, 65076], [65101, 65103], 65343];

View File

@ -33,7 +33,7 @@ enum ExitStatus {
NoErrors = 0,
ErrorsReported = 1,
FatalError = 2,
ConfigFileParseError = 3
ConfigFileParseError = 3,
}
interface PyrightJsonResults {
@ -65,13 +65,13 @@ interface DiagnosticResult {
const cancellationNone = Object.freeze({
isCancellationRequested: false,
onCancellationRequested: function() {
onCancellationRequested: function () {
return {
dispose() {
/* empty */
}
},
};
}
},
});
function processArgs() {
@ -88,7 +88,7 @@ function processArgs() {
{ name: 'venv-path', alias: 'v', type: String },
{ name: 'verbose', type: Boolean },
{ name: 'version', type: Boolean },
{ name: 'watch', alias: 'w', type: Boolean }
{ name: 'watch', alias: 'w', type: Boolean },
];
let args: CommandLineOptions;
@ -131,7 +131,7 @@ function processArgs() {
// Assume any relative paths are relative to the working directory.
if (args.files && Array.isArray(args.files)) {
options.fileSpecs = args.files;
options.fileSpecs = options.fileSpecs.map(f => combinePaths(process.cwd(), f));
options.fileSpecs = options.fileSpecs.map((f) => combinePaths(process.cwd(), f));
} else {
options.fileSpecs = [];
}
@ -167,7 +167,7 @@ function processArgs() {
const output = args.outputjson ? new NullConsole() : undefined;
const service = new AnalyzerService('<default>', createFromRealFileSystem(output), output);
service.setCompletionCallback(results => {
service.setCompletionCallback((results) => {
if (results.fatalErrorOccurred) {
process.exit(ExitStatus.FatalError);
}
@ -282,21 +282,21 @@ function reportDiagnosticsAsJson(
filesAnalyzed: filesInProgram,
errorCount: 0,
warningCount: 0,
timeInSec
}
timeInSec,
},
};
let errorCount = 0;
let warningCount = 0;
fileDiagnostics.forEach(fileDiag => {
fileDiag.diagnostics.forEach(diag => {
fileDiagnostics.forEach((fileDiag) => {
fileDiag.diagnostics.forEach((diag) => {
if (diag.category === DiagnosticCategory.Error || diag.category === DiagnosticCategory.Warning) {
report.diagnostics.push({
file: fileDiag.filePath,
severity: diag.category === DiagnosticCategory.Error ? 'error' : 'warning',
message: diag.message,
range: diag.range
range: diag.range,
});
if (diag.category === DiagnosticCategory.Error) {
@ -316,7 +316,7 @@ function reportDiagnosticsAsJson(
return {
errorCount,
warningCount,
diagnosticCount: errorCount + warningCount
diagnosticCount: errorCount + warningCount,
};
}
@ -324,15 +324,15 @@ function reportDiagnosticsAsText(fileDiagnostics: FileDiagnostics[]): Diagnostic
let errorCount = 0;
let warningCount = 0;
fileDiagnostics.forEach(fileDiagnostics => {
fileDiagnostics.forEach((fileDiagnostics) => {
// Don't report unused code diagnostics.
const fileErrorsAndWarnings = fileDiagnostics.diagnostics.filter(
diag => diag.category !== DiagnosticCategory.UnusedCode
(diag) => diag.category !== DiagnosticCategory.UnusedCode
);
if (fileErrorsAndWarnings.length > 0) {
console.log(`${fileDiagnostics.filePath}`);
fileErrorsAndWarnings.forEach(diag => {
fileErrorsAndWarnings.forEach((diag) => {
let message = ' ';
if (diag.range) {
message +=
@ -369,7 +369,7 @@ function reportDiagnosticsAsText(fileDiagnostics: FileDiagnostics[]): Diagnostic
return {
errorCount,
warningCount,
diagnosticCount: errorCount + warningCount
diagnosticCount: errorCount + warningCount,
};
}

View File

@ -178,7 +178,7 @@ test('Builtins1', () => {
'zip',
// These really shouldn't be exposed but are defined by builtins.pyi currently.
'function',
'ellipsis'
'ellipsis',
];
const moduleScope = AnalyzerNodeInfo.getScope(analysisResults[0].parseResults!.parseTree)!;

View File

@ -37,7 +37,7 @@ test('UtilsAppendUndefinedValue', () => {
test('UtilsFindEmpty', () => {
const data: number[] = [];
assert.equal(
utils.find(data, e => true),
utils.find(data, (e) => true),
undefined
);
});
@ -45,7 +45,7 @@ test('UtilsFindEmpty', () => {
test('UtilsFindNoMatch', () => {
const data = [1];
assert.equal(
utils.find(data, e => false),
utils.find(data, (e) => false),
undefined
);
});
@ -53,7 +53,7 @@ test('UtilsFindNoMatch', () => {
test('UtilsFindMatchSimple', () => {
const data = [1];
assert.equal(
utils.find(data, e => e === 1),
utils.find(data, (e) => e === 1),
1
);
});
@ -61,7 +61,7 @@ test('UtilsFindMatchSimple', () => {
test('UtilsFindMatch', () => {
const data = [new D(1, 'Hello')];
assert.equal(
utils.find(data, e => e.value === 1),
utils.find(data, (e) => e.value === 1),
data[0]
);
});
@ -81,21 +81,21 @@ test('UtilsStableSort', () => {
const sorted = utils.stableSort(data, (a, b) => compareValues(a.value, b.value));
const result: string[] = [];
sorted.forEach(e => result.push(e.name));
sorted.forEach((e) => result.push(e.name));
assert.deepEqual(result, ['Hello1', 'Hello2', 'Hello3', 'Hello4']);
});
test('UtilsBinarySearch', () => {
const data = [new D(1, 'Hello3'), new D(2, 'Hello1'), new D(3, 'Hello4'), new D(4, 'Hello2')];
const index = utils.binarySearch(data, new D(3, 'Unused'), v => v.value, compareValues, 0);
const index = utils.binarySearch(data, new D(3, 'Unused'), (v) => v.value, compareValues, 0);
assert.equal(index, 2);
});
test('UtilsBinarySearchMiss', () => {
const data = [new D(1, 'Hello3'), new D(2, 'Hello1'), new D(4, 'Hello4'), new D(5, 'Hello2')];
const index = utils.binarySearch(data, new D(3, 'Unused'), v => v.value, compareValues, 0);
const index = utils.binarySearch(data, new D(3, 'Unused'), (v) => v.value, compareValues, 0);
assert.equal(~index, 2);
});
@ -144,7 +144,7 @@ test('flatten', () => {
const data: number[][] = [
[1, 2],
[3, 4],
[5, 6]
[5, 6],
];
assert.deepEqual(utils.flatten(data), [1, 2, 3, 4, 5, 6]);
});

View File

@ -23,34 +23,34 @@ test('textRange overlap', () => {
const textRangeOne: Range = {
start: {
line: 0,
character: 0
character: 0,
},
end: {
line: 10,
character: 0
}
character: 0,
},
};
const textRangeTwo: Range = {
start: {
line: 11,
character: 0
character: 0,
},
end: {
line: 20,
character: 0
}
character: 0,
},
};
const textRangeThree: Range = {
start: {
line: 5,
character: 0
character: 0,
},
end: {
line: 15,
character: 0
}
character: 0,
},
};
assert.equal(doRangesOverlap(textRangeOne, textRangeTwo), false);
@ -62,12 +62,12 @@ test('textRange contain', () => {
const textRangeOne: Range = {
start: {
line: 0,
character: 5
character: 5,
},
end: {
line: 10,
character: 1
}
character: 1,
},
};
assert.equal(doesRangeContain(textRangeOne, { line: 0, character: 0 }), false);
@ -82,34 +82,34 @@ test('textRange equal', () => {
const textRangeOne: Range = {
start: {
line: 0,
character: 0
character: 0,
},
end: {
line: 10,
character: 0
}
character: 0,
},
};
const textRangeTwo: Range = {
start: {
line: 0,
character: 0
character: 0,
},
end: {
line: 10,
character: 0
}
character: 0,
},
};
const textRangeThree: Range = {
start: {
line: 5,
character: 0
character: 0,
},
end: {
line: 15,
character: 0
}
character: 0,
},
};
assert.equal(rangesAreEqual(textRangeOne, textRangeTwo), true);

View File

@ -54,7 +54,7 @@ test('FindFilesVirtualEnvAutoDetectExclude', () => {
// There are 3 python files in the workspace, outside of myvenv
// There is 1 python file in myvenv, which should be excluded
const fileNames = fileList.map(p => getBaseFileName(p)).sort();
const fileNames = fileList.map((p) => getBaseFileName(p)).sort();
assert.deepEqual(fileNames, ['sample1.py', 'sample2.py', 'sample3.py']);
});
@ -72,7 +72,7 @@ test('FindFilesVirtualEnvAutoDetectInclude', () => {
// There are 3 python files in the workspace, outside of myvenv
// There is 1 more python file in excluded folder
// There is 1 python file in myvenv, which should be included
const fileNames = fileList.map(p => getBaseFileName(p)).sort();
const fileNames = fileList.map((p) => getBaseFileName(p)).sort();
assert.deepEqual(fileNames, ['library1.py', 'sample1.py', 'sample2.py', 'sample3.py']);
});
@ -201,8 +201,8 @@ test('AutoSearchPathsOn', () => {
pythonPlatform: undefined,
pythonVersion: 776,
root: cwd,
extraPaths: [normalizePath(combinePaths(cwd, 'src'))]
}
extraPaths: [normalizePath(combinePaths(cwd, 'src'))],
},
];
assert.deepEqual(configOptions.executionEnvironments, expectedExecEnvs);
@ -255,8 +255,8 @@ test('AutoSearchPathsOnWithConfigExecEnv', () => {
pythonPlatform: undefined,
pythonVersion: 773,
root: cwd,
extraPaths: []
}
extraPaths: [],
},
];
assert.deepEqual(configOptions.executionEnvironments, expectedExecEnvs);

View File

@ -81,7 +81,7 @@ test('DebugAssertNever', () => {
const enum MyEnum {
A,
B,
C
C,
}
const unused = 5 as MyEnum;
@ -114,7 +114,7 @@ test('DebugFormatEnum', () => {
enum MyEnum {
A,
B,
C
C,
}
assert(debug.formatEnum(MyEnum.A, MyEnum, false) === 'A');
});

View File

@ -24,10 +24,10 @@ test('PlaintextIndention', () => {
['\nA\n B', 'A\n B'],
['\n A\n B', 'A\nB'],
['\nA\nB\n', 'A\nB'],
[' \n\nA \n \nB \n ', 'A\n\nB']
[' \n\nA \n \nB \n ', 'A\n\nB'],
];
all.forEach(v => _testConvertToMarkdown(v[0], v[1]));
all.forEach((v) => _testConvertToMarkdown(v[0], v[1]));
});
test('NormalText', () => {

View File

@ -66,7 +66,7 @@ test('CreateRich', () => {
[normalizeSlashes('/a/b/2.txt')]: new vfs.File('hello2'),
[normalizeSlashes('/a/3.txt')]: new vfs.File('hello3'),
[normalizeSlashes('/4.txt')]: new vfs.File('hello4', { encoding: 'utf16le' }),
[normalizeSlashes('/a/b/../c/./5.txt')]: new vfs.File('hello5', { encoding: 'ucs2' })
[normalizeSlashes('/a/b/../c/./5.txt')]: new vfs.File('hello5', { encoding: 'ucs2' }),
};
const fs = new vfs.FileSystem(/*ignoreCase*/ true, { cwd, files });
@ -151,7 +151,7 @@ test('createFromFileSystem1', () => {
// file system will map physical file system to virtual one
const fs = factory.createFromFileSystem(host.HOST, false, {
documents: [new factory.TextDocument(filepath, content)],
cwd: factory.srcFolder
cwd: factory.srcFolder,
});
// check existing typeshed folder on virtual path inherited from base snapshot from physical file system
@ -172,17 +172,17 @@ test('createFromFileSystemWithCustomTypeshedPath', () => {
const invalidpath = normalizeSlashes(combinePaths(host.HOST.getWorkspaceRoot(), '../docs'));
const fs = factory.createFromFileSystem(host.HOST, /* ignoreCase */ false, {
cwd: factory.srcFolder,
meta: { [factory.typeshedFolder]: invalidpath }
meta: { [factory.typeshedFolder]: invalidpath },
});
const entries = fs.readdirSync(factory.typeshedFolder);
assert(entries.filter(e => e.endsWith('.md')).length > 0);
assert(entries.filter((e) => e.endsWith('.md')).length > 0);
});
test('createFromFileSystemWithMetadata', () => {
const fs = factory.createFromFileSystem(host.HOST, /* ignoreCase */ false, {
cwd: factory.srcFolder,
meta: { unused: 'unused' }
meta: { unused: 'unused' },
});
assert(fs.existsSync(factory.srcFolder));

View File

@ -32,7 +32,7 @@ test('GlobalOptions', () => {
const data = parseTestData('.', code, 'test.py');
assertOptions(data.globalOptions, [
['libpath', '../dist/lib'],
['pythonversion', '3.7']
['pythonversion', '3.7'],
]);
assert.equal(data.files.length, 1);
@ -74,7 +74,7 @@ test('Extra file options', () => {
assertOptions(data.globalOptions, []);
assertOptions(data.files[0].fileOptions, [
['filename', 'file1.py'],
['library', 'false']
['library', 'false'],
]);
});
@ -220,12 +220,13 @@ test('Multiple Files', () => {
const data = parseTestData('.', code, 'test.py');
assert.equal(data.files.length, 3);
assert.equal(data.files.filter(f => f.fileName === normalizeSlashes('./src/A.py'))[0].content, getContent('A'));
assert.equal(data.files.filter((f) => f.fileName === normalizeSlashes('./src/A.py'))[0].content, getContent('A'));
assert.equal(
data.files.filter(f => f.fileName === normalizeSlashes(combinePaths(factory.libFolder, 'src/B.py')))[0].content,
data.files.filter((f) => f.fileName === normalizeSlashes(combinePaths(factory.libFolder, 'src/B.py')))[0]
.content,
getContent('B')
);
assert.equal(data.files.filter(f => f.fileName === normalizeSlashes('./src/C.py'))[0].content, getContent('C'));
assert.equal(data.files.filter((f) => f.fileName === normalizeSlashes('./src/C.py'))[0].content, getContent('C'));
});
test('Multiple Files with default name', () => {
@ -246,9 +247,12 @@ test('Multiple Files with default name', () => {
const data = parseTestData('.', code, './src/test.py');
assert.equal(data.files.length, 3);
assert.equal(data.files.filter(f => f.fileName === normalizeSlashes('./src/test.py'))[0].content, getContent('A'));
assert.equal(data.files.filter(f => f.fileName === normalizeSlashes('./src/B.py'))[0].content, getContent('B'));
assert.equal(data.files.filter(f => f.fileName === normalizeSlashes('./src/C.py'))[0].content, getContent('C'));
assert.equal(
data.files.filter((f) => f.fileName === normalizeSlashes('./src/test.py'))[0].content,
getContent('A')
);
assert.equal(data.files.filter((f) => f.fileName === normalizeSlashes('./src/B.py'))[0].content, getContent('B'));
assert.equal(data.files.filter((f) => f.fileName === normalizeSlashes('./src/C.py'))[0].content, getContent('C'));
});
test('Multiple Files with markers', () => {
@ -270,16 +274,16 @@ test('Multiple Files with markers', () => {
const data = parseTestData('.', code, 'test.py');
assert.equal(data.files.length, 3);
assert.equal(data.files.filter(f => f.fileName === normalizeSlashes('./src/A.py'))[0].content, getContent('A'));
assert.equal(data.files.filter(f => f.fileName === normalizeSlashes('./src/B.py'))[0].content, getContent('B'));
assert.equal(data.files.filter(f => f.fileName === normalizeSlashes('./src/C.py'))[0].content, getContent('C'));
assert.equal(data.files.filter((f) => f.fileName === normalizeSlashes('./src/A.py'))[0].content, getContent('A'));
assert.equal(data.files.filter((f) => f.fileName === normalizeSlashes('./src/B.py'))[0].content, getContent('B'));
assert.equal(data.files.filter((f) => f.fileName === normalizeSlashes('./src/C.py'))[0].content, getContent('C'));
assert.equal(data.ranges.length, 3);
assert(data.markerPositions.get('marker1'));
assert(data.markerPositions.get('marker2'));
assert.equal(data.ranges.filter(r => r.marker).length, 2);
assert.equal(data.ranges.filter((r) => r.marker).length, 2);
});
test('fourSlashWithFileSystem', () => {
@ -299,12 +303,12 @@ test('fourSlashWithFileSystem', () => {
const data = parseTestData('.', code, 'unused');
const documents = data.files.map(
f => new factory.TextDocument(f.fileName, f.content, new Map<string, string>(Object.entries(f.fileOptions)))
(f) => new factory.TextDocument(f.fileName, f.content, new Map<string, string>(Object.entries(f.fileOptions)))
);
const fs = factory.createFromFileSystem(host.HOST, /* ignoreCase */ false, {
documents,
cwd: normalizeSlashes('/')
cwd: normalizeSlashes('/'),
});
for (const file of data.files) {

View File

@ -22,13 +22,13 @@ describe('fourslash tests', () => {
testFiles.push(file);
}
testFiles.forEach(file => {
testFiles.forEach((file) => {
describe(file, () => {
const fn = normalizeSlashes(file);
const justName = fn.replace(/^.*[\\/]/, '');
// TODO: make these use promise/async rather than a callback token
it('fourslash test ' + justName + ' runs correctly', cb => {
it('fourslash test ' + justName + ' runs correctly', (cb) => {
runFourSlashTest(MODULE_PATH, fn, cb);
});
});

View File

@ -6,6 +6,6 @@
helper.verifyCompletion('excluded', {
marker1: {
completions: [{ label: 'capitalize' }]
}
completions: [{ label: 'capitalize' }],
},
});

View File

@ -22,16 +22,16 @@ helper.verifyCompletion('exact', {
label: 'some_func1',
documentation: {
kind: 'markdown',
value: '```python\nsome_func1: (a) -> None\n```\n---\nsome function docs'
}
value: '```python\nsome_func1: (a) -> None\n```\n---\nsome function docs',
},
},
{
label: 'some_func2',
documentation: {
kind: 'markdown',
value: '```python\nsome_func2: (a) -> None\n```\n---\nanother function docs'
}
}
]
}
value: '```python\nsome_func2: (a) -> None\n```\n---\nanother function docs',
},
},
],
},
});

View File

@ -6,6 +6,6 @@
helper.verifyCompletion('included', {
marker1: {
completions: [{ label: 'denominator' }, { label: 'imag' }, { label: 'numerator' }, { label: 'real' }]
}
completions: [{ label: 'denominator' }, { label: 'imag' }, { label: 'numerator' }, { label: 'real' }],
},
});

View File

@ -30,10 +30,10 @@ helper.verifyCompletion('included', {
label: 'Validator',
documentation: {
kind: 'markdown',
value: '```python\nclass Validator()\n```\n---\nThe validator class'
}
}
]
value: '```python\nclass Validator()\n```\n---\nThe validator class',
},
},
],
},
marker2: {
completions: [
@ -42,9 +42,9 @@ helper.verifyCompletion('included', {
documentation: {
kind: 'markdown',
value:
'```python\nis_valid: (self: Validator, text: str) -> bool\n```\n---\nChecks if the input string is valid.'
}
}
]
}
'```python\nis_valid: (self: Validator, text: str) -> bool\n```\n---\nChecks if the input string is valid.',
},
},
],
},
});

View File

@ -25,10 +25,10 @@ helper.verifyCompletion('included', {
label: 'Validator',
documentation: {
kind: 'markdown',
value: '```python\nclass Validator()\n```\n---\nThe validator class'
}
}
]
value: '```python\nclass Validator()\n```\n---\nThe validator class',
},
},
],
},
marker2: {
completions: [
@ -37,9 +37,9 @@ helper.verifyCompletion('included', {
documentation: {
kind: 'markdown',
value:
'```python\nis_valid: (self: Validator, text: str) -> bool\n```\n---\nChecks if the input string is valid.'
}
}
]
}
'```python\nis_valid: (self: Validator, text: str) -> bool\n```\n---\nChecks if the input string is valid.',
},
},
],
},
});

View File

@ -25,10 +25,10 @@ helper.verifyCompletion('included', {
label: 'Validator',
documentation: {
kind: 'markdown',
value: '```python\nclass Validator()\n```\n---\nThe validator class'
}
}
]
value: '```python\nclass Validator()\n```\n---\nThe validator class',
},
},
],
},
marker2: {
completions: [
@ -37,9 +37,9 @@ helper.verifyCompletion('included', {
documentation: {
kind: 'markdown',
value:
'```python\nis_valid: (self: Validator, text: str) -> bool\n```\n---\nChecks if the input string is valid.'
}
}
]
}
'```python\nis_valid: (self: Validator, text: str) -> bool\n```\n---\nChecks if the input string is valid.',
},
},
],
},
});

View File

@ -53,12 +53,12 @@ helper.verifyDiagnostics({
marker1: { category: 'error', message: `No parameter named "ddd"` },
marker2: {
category: 'error',
message: `Argument of type "Literal['hello']" cannot be assigned to parameter "bbb" of type "int"\n "str" is incompatible with "int"`
message: `Argument of type "Literal['hello']" cannot be assigned to parameter "bbb" of type "int"\n "str" is incompatible with "int"`,
},
marker3: { category: 'error', message: `Argument missing for parameter "ccc"` },
marker4: { category: 'error', message: 'Expected 3 positional arguments' },
marker5: {
category: 'error',
message: 'Data fields without default value cannot appear after data fields with default values'
}
message: 'Data fields without default value cannot appear after data fields with default values',
},
});

View File

@ -49,5 +49,5 @@
helper.verifyDiagnostics({
marker1: { category: 'error', message: `Expected no arguments to "D" constructor` },
marker2: { category: 'error', message: `"D(3)" has type "Type[D]" and is not callable` }
marker2: { category: 'error', message: `"D(3)" has type "Type[D]" and is not callable` },
});

View File

@ -30,5 +30,5 @@
helper.verifyDiagnostics({
marker1: { category: 'error', message: 'Expected 1 positional argument' },
marker2: { category: 'error', message: `Argument missing for parameter "y"` }
marker2: { category: 'error', message: `Argument missing for parameter "y"` },
});

View File

@ -56,10 +56,10 @@
helper.verifyDiagnostics({
marker1: {
category: 'error',
message: `Argument of type "C1" cannot be assigned to parameter "aa" of type "C2"\n "C1" is incompatible with "C2"`
message: `Argument of type "C1" cannot be assigned to parameter "aa" of type "C2"\n "C1" is incompatible with "C2"`,
},
marker2: {
category: 'error',
message: 'Data fields without default value cannot appear after data fields with default values'
}
message: 'Data fields without default value cannot appear after data fields with default values',
},
});

View File

@ -143,6 +143,6 @@ declare namespace Consts {
createTypeStub = 'pyright.createtypestub',
restartServer = 'pyright.restartserver',
orderImports = 'pyright.organizeimports',
addMissingOptionalToParam = 'pyright.addoptionalforparam'
addMissingOptionalToParam = 'pyright.addoptionalforparam',
}
}

View File

@ -18,6 +18,6 @@ helper.verifyHover({
marker1: { value: '```python\n(class) Validator\n```\nThe validator class', kind: 'markdown' },
marker2: {
value: '```python\n(method) is_valid: (text: str) -> bool\n```\nChecks if the input string is valid.',
kind: 'markdown'
}
kind: 'markdown',
},
});

View File

@ -27,6 +27,6 @@ helper.verifyHover({
marker1: { value: '```python\n(class) Validator\n```\nThe validator class', kind: 'markdown' },
marker2: {
value: '```python\n(method) is_valid: (text: str) -> bool\n```\nChecks if the input string is valid.',
kind: 'markdown'
}
kind: 'markdown',
},
});

View File

@ -22,6 +22,6 @@ helper.verifyHover({
marker1: { value: '```python\n(class) Validator\n```\nThe validator class', kind: 'markdown' },
marker2: {
value: '```python\n(method) is_valid: (text: str) -> bool\n```\nChecks if the input string is valid.',
kind: 'markdown'
}
kind: 'markdown',
},
});

View File

@ -22,6 +22,6 @@ helper.verifyHover({
marker1: { value: '```python\n(class) Validator\n```\nThe validator class', kind: 'markdown' },
marker2: {
value: '```python\n(method) is_valid: (text: str) -> bool\n```\nChecks if the input string is valid.',
kind: 'markdown'
}
kind: 'markdown',
},
});

View File

@ -9,5 +9,5 @@
helper.verifyDiagnostics({
marker1: { category: 'error', message: `Import "notexistant" could not be resolved` },
marker2: { category: 'error', message: `Import "django" could not be resolved` }
marker2: { category: 'error', message: `Import "django" could not be resolved` },
});

View File

@ -24,9 +24,9 @@ helper.verifyCodeActions({
command: {
title: 'Create Type Stub',
command: Consts.Commands.createTypeStub,
arguments: ['\\', 'testLib', '\\.src\\test.py']
}
}
]
}
arguments: ['\\', 'testLib', '\\.src\\test.py'],
},
},
],
},
});

View File

@ -20,7 +20,7 @@ const filename = helper.getMarkerByName('marker').fileName;
const command = {
title: 'Create Type Stub',
command: Consts.Commands.createTypeStub,
arguments: ['/', 'testLib', filename]
arguments: ['/', 'testLib', filename],
};
helper.verifyCommand(command, {
@ -34,5 +34,5 @@ class MyLibrary:
`
`,
});

View File

@ -16,5 +16,5 @@
//// import [|/*marker*/testLi|]b
helper.verifyDiagnostics({
marker: { category: 'warning', message: `Stub file not found for "testLib"` }
marker: { category: 'warning', message: `Stub file not found for "testLib"` },
});

View File

@ -30,7 +30,7 @@ class MyLibrary:
`
}
}
`,
},
},
});

View File

@ -10,11 +10,11 @@ helper.verifyCommand(
{
title: 'Quick action order imports 1',
command: Consts.Commands.orderImports,
arguments: ['quickActionOrganizeImportTest1.py']
arguments: ['quickActionOrganizeImportTest1.py'],
},
{
['quickActionOrganizeImportTest1.py']: `import os
import sys
import time`
import time`,
}
);

Some files were not shown because too many files have changed in this diff.