Merge pull request #381 from DetachHead/merge-1.1.365
Merge 1.1.365
DetachHead authored May 29, 2024
2 parents fc74ccf + 9cd4da0 commit 652e949
Showing 83 changed files with 1,025 additions and 578 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/build_and_release.yml
@@ -19,7 +19,7 @@ jobs:
pull-requests: read

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
fetch-depth: 0

@@ -55,7 +55,7 @@ jobs:
pdm run npm run package
mv basedpyright-*.vsix ${{ env.VSIX_NAME }}
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
with:
name: ${{ env.ARTIFACT_NAME_VSIX }}
path: ${{ env.VSIX_DIR }}/${{ env.VSIX_NAME }}
2 changes: 1 addition & 1 deletion .github/workflows/codeql.yml
@@ -32,7 +32,7 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
2 changes: 1 addition & 1 deletion .github/workflows/docs.yaml
@@ -31,7 +31,7 @@ jobs:
uses: actions/configure-pages@v4
- run: cp README.md docs/README.md
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
uses: actions/upload-pages-artifact@v4
with:
path: 'docs'
- name: Deploy to GitHub Pages
27 changes: 15 additions & 12 deletions .github/workflows/mypy_primer_comment.yaml
@@ -21,7 +21,7 @@ jobs:
if: ${{ github.event.workflow_run.conclusion == 'success' }}
steps:
- name: Download diffs
uses: actions/github-script@v6
uses: actions/github-script@v7
with:
script: |
const fs = require('fs');
@@ -30,24 +30,27 @@
repo: context.repo.repo,
run_id: ${{ github.event.workflow_run.id }},
});
const [matchArtifact] = artifacts.data.artifacts.filter((artifact) =>
artifact.name == "mypy_primer_diffs");
const matchedArtifacts = artifacts.data.artifacts.filter((artifact) =>
artifact.name.startsWith("mypy_primer_diffs"));
const download = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: "zip",
});
fs.writeFileSync("diff.zip", Buffer.from(download.data));
for (let i = 0; i < matchedArtifacts.length; i++) {
const matchArtifact = matchedArtifacts[i];
const download = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: "zip",
});
fs.writeFileSync(`diff_${i}.zip`, Buffer.from(download.data));
}
- run: unzip diff.zip
- run: unzip diff_\*.zip
- run: |
cat diff_*.txt | tee fulldiff.txt
- name: Post comment
id: post-comment
uses: actions/github-script@v6
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
16 changes: 8 additions & 8 deletions .github/workflows/mypy_primer_pr.yaml
@@ -41,11 +41,11 @@ jobs:
shard-index: [0, 1]
fail-fast: false
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
path: pyright_to_test
fetch-depth: 0
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: Install dependencies
@@ -78,17 +78,17 @@ jobs:
| tee diff_${{ matrix.shard-index }}.txt
) || [ $? -eq 1 ]
- name: Upload mypy_primer diff
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: mypy_primer_diffs
name: mypy_primer_diffs_${{ matrix.shard-index }}
path: diff_${{ matrix.shard-index }}.txt
- if: ${{ matrix.shard-index }} == 0
- if: ${{ matrix.shard-index == 0 }}
name: Save PR number
run: |
echo ${{ github.event.pull_request.number }} | tee pr_number.txt
- if: ${{ matrix.shard-index }} == 0
- if: ${{ matrix.shard-index == 0 }}
name: Upload PR number
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: mypy_primer_diffs
name: mypy_primer_diffs_pr_number
path: pr_number.txt
6 changes: 3 additions & 3 deletions .github/workflows/mypy_primer_push.yaml
@@ -32,11 +32,11 @@ jobs:
strategy:
fail-fast: false
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
path: pyright_to_test
fetch-depth: 0
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: Install dependencies
@@ -62,7 +62,7 @@ jobs:
| tee diff.txt
) || [ $? -eq 1 ]
- name: Upload mypy_primer diff
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: mypy_primer_diffs
path: diff.txt
12 changes: 6 additions & 6 deletions .github/workflows/validation.yml
@@ -13,7 +13,7 @@ jobs:
static_checks:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- run: ./pw pdm use ${{ env.PYTHON_VERSION }}

@@ -51,9 +51,9 @@ jobs:
name: Test ${{ matrix.os }}
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- uses: actions/setup-node@v3
- uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}

@@ -81,7 +81,7 @@ jobs:

# Install python so we can create a VENV for tests
- name: Use Python ${{env.PYTHON_VERSION}}
uses: actions/setup-python@v4
uses: actions/setup-python@v5
id: install_python
with:
python-version: ${{env.PYTHON_VERSION}}
@@ -127,9 +127,9 @@ jobs:
needs: static_checks

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- uses: actions/setup-node@v3
- uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}

6 changes: 4 additions & 2 deletions docs/configuration.md
@@ -8,14 +8,16 @@ Relative paths specified within the config file are relative to the config file

## Main Configuration Options

- **include** [array of paths, optional]: Paths of directories or files that should be included. If no paths are specified, pyright defaults to the directory that contains the config file. Paths may contain wildcard characters ** (a directory or multiple levels of directories), * (a sequence of zero or more characters), or ? (a single character). If no include paths are specified, the root path for the workspace is assumed.
- **include** [array of paths, optional]: Paths of directories or files that should be considered part of the project. If no paths are specified, pyright defaults to the directory that contains the config file. Paths may contain wildcard characters ** (a directory or multiple levels of directories), * (a sequence of zero or more characters), or ? (a single character). If no include paths are specified, the root path for the workspace is assumed.

- **exclude** [array of paths, optional]: Paths of directories or files that should not be included. These override the includes directories and files, allowing specific subdirectories to be ignored. Note that files in the exclude paths may still be included in the analysis if they are referenced (imported) by source files that are not excluded. Paths may contain wildcard characters ** (a directory or multiple levels of directories), * (a sequence of zero or more characters), or ? (a single character). If no exclude paths are specified, Pyright automatically excludes the following: `**/node_modules`, `**/__pycache__`, `**/.*`. Pylance also excludes any virtual environment directories regardless of the exclude paths specified. For more detail on Python environment specification and discovery, refer to the [import resolution](import-resolution.md#configuring-your-python-environment) documentation.
- **exclude** [array of paths, optional]: Paths of directories or files that should not be considered part of the project. These override the includes directories and files, allowing specific subdirectories to be excluded. Note that files in the exclude paths may still be included in the analysis if they are referenced (imported) by source files that are not excluded. Paths may contain wildcard characters ** (a directory or multiple levels of directories), * (a sequence of zero or more characters), or ? (a single character). If no exclude paths are specified, Pyright automatically excludes the following: `**/node_modules`, `**/__pycache__`, `**/.*`. Pylance also excludes any virtual environment directories regardless of the exclude paths specified. For more detail on Python environment specification and discovery, refer to the [import resolution](import-resolution.md#configuring-your-python-environment) documentation.

- **ignore** [array of paths, optional]: Paths of directories or files whose diagnostic output (errors and warnings) should be suppressed even if they are an included file or within the transitive closure of an included file. Paths may contain wildcard characters ** (a directory or multiple levels of directories), * (a sequence of zero or more characters), or ? (a single character).

- **strict** [array of paths, optional]: Paths of directories or files that should use “strict” analysis if they are included. This is the same as manually adding a “# pyright: strict” comment. In strict mode, most type-checking rules are enabled. Refer to [this table](configuration.md#diagnostic-settings-defaults) for details about which rules are enabled in strict mode. Paths may contain wildcard characters ** (a directory or multiple levels of directories), * (a sequence of zero or more characters), or ? (a single character).

- **extends** [path, optional]: Path to another `.json` or `.toml` file that is used as a “base configuration”, allowing this configuration to inherit configuration settings. Top-level keys within this configuration overwrite top-level keys in the base configuration. Multiple levels of inheritance are supported. Relative paths specified in a configuration file are resolved relative to the location of that configuration file.

- **defineConstant** [map of constants to values (boolean or string), optional]: Set of identifiers that should be assumed to contain a constant value wherever used within this program. For example, `{ "DEBUG": true }` indicates that pyright should assume that the identifier `DEBUG` will always be equal to `True`. If this identifier is used within a conditional expression (such as `if not DEBUG:`) pyright will use the indicated value to determine whether the guarded block is reachable or not. Member expressions that reference one of these constants (e.g. `my_module.DEBUG`) are also supported.

- **typeshedPath** [path, optional]: Path to a directory that contains typeshed type stub files. Pyright ships with a bundled copy of typeshed type stubs. If you want to use a different version of typeshed stubs, you can clone the [typeshed github repo](https://github.com/python/typeshed) to a local directory and reference the location with this path. This option is useful if you’re actively contributing updates to typeshed.
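
As an aside for readers of this diff (not part of the commit), the `defineConstant` option described above is easiest to see with a small sketch. Assuming a hypothetical pyrightconfig.json containing `{ "defineConstant": { "DEBUG": true } }`, pyright should use the configured value when deciding which branches are reachable:

```python
# Minimal sketch of defineConstant; assumes pyrightconfig.json defines
# { "defineConstant": { "DEBUG": true } }. Illustrative only.

DEBUG = False  # runtime default; pyright is told to assume True instead


def log(message: str) -> None:
    if not DEBUG:
        # With DEBUG assumed to be True, pyright should treat this guarded
        # block as unreachable rather than analyzing it.
        return
    print(f"[debug] {message}")


log("hello")
```

Per the paragraph above, a member expression such as `my_module.DEBUG` would be handled the same way.
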
11 changes: 9 additions & 2 deletions docs/type-inference.md
@@ -261,11 +261,11 @@ var1 = [4]
When inferring the type of a tuple expression (in the absence of bidirectional inference hints), Pyright assumes that the tuple has a fixed length, and each tuple element is typed as specifically as possible.

```python
# The inferred type is tuple[Literal[1], Literal["a"], Literal[True]]
# The inferred type is tuple[Literal[1], Literal["a"], Literal[True]].
var1 = (1, "a", True)

def func1(a: int):
# The inferred type is tuple[int, int]
# The inferred type is tuple[int, int].
var2 = (a, a)

# If you want the type to be tuple[int, ...]
@@ -274,6 +274,13 @@ def func1(a: int):
var3: tuple[int, ...] = (a, a)
```

Because tuples are typed as specifically as possible, literal types are normally retained. However, as an exception to this inference rule, if the tuple expression is nested within another tuple, set, list or dictionary expression, literal types are not retained. This is done to avoid the inference of complex types (e.g. unions with many subtypes) when evaluating tuple statements with many entries.

```python
# The inferred type is list[tuple[int, str, bool]].
var4 = [(1, "a", True), (2, "b", False), (3, "c", False)]
```
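
As a reader's note (not part of the commit), the rule should apply equally to the other container kinds the new paragraph lists; a hypothetical set example:

```python
# Following the nested-tuple rule described above, literal types are not
# retained here either; the expected inference is set[tuple[int, str]]
# rather than a union of literal-typed tuples.
var5 = {(1, "a"), (2, "b"), (3, "c")}
```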

#### List Expressions

When inferring the type of a list expression (in the absence of bidirectional inference hints), Pyright uses the following heuristics:
2 changes: 1 addition & 1 deletion lerna.json
@@ -2,7 +2,7 @@
"packages": [
"packages/*"
],
"version": "1.1.364",
"version": "1.1.365",
"command": {
"version": {
"push": false,
4 changes: 2 additions & 2 deletions packages/pyright-internal/package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion packages/pyright-internal/package.json
@@ -2,7 +2,7 @@
"name": "pyright-internal",
"displayName": "pyright",
"description": "Type checker for the Python language",
"version": "1.1.364",
"version": "1.1.365",
"license": "MIT",
"private": true,
"files": [
13 changes: 9 additions & 4 deletions packages/pyright-internal/src/analyzer/analysis.ts
@@ -24,13 +24,18 @@ export interface AnalysisResults {
diagnostics: FileDiagnostics[];
filesInProgram: number;
checkingOnlyOpenFiles: boolean;
filesRequiringAnalysis: number;
requiringAnalysisCount: RequiringAnalysisCount;
fatalErrorOccurred: boolean;
configParseErrorOccurred: boolean;
elapsedTime: number;
error?: Error | undefined;
}

export interface RequiringAnalysisCount {
files: number;
cells: number;
}

export type AnalysisCompleteCallback = (results: AnalysisResults) => void;

export function analyzeProgram(
@@ -51,7 +56,7 @@ export function analyzeProgram(
const duration = new Duration();
moreToAnalyze = program.analyze(maxTime, token);

const filesLeftToAnalyze = program.getFilesToAnalyzeCount();
const requiringAnalysisCount = program.getFilesToAnalyzeCount();

// If we're using command-line mode, the maxTime will be undefined, and we'll
// want to report all diagnostics rather than just the ones that have changed.
@@ -66,7 +71,7 @@
callback({
diagnostics,
filesInProgram: program.getFileCount(),
filesRequiringAnalysis: filesLeftToAnalyze,
requiringAnalysisCount: requiringAnalysisCount,
checkingOnlyOpenFiles: program.isCheckingOnlyOpenFiles(),
fatalErrorOccurred: false,
configParseErrorOccurred: false,
@@ -84,7 +89,7 @@
callback({
diagnostics: [],
filesInProgram: 0,
filesRequiringAnalysis: 0,
requiringAnalysisCount: { files: 0, cells: 0 },
checkingOnlyOpenFiles: true,
fatalErrorOccurred: true,
configParseErrorOccurred: false,
@@ -262,7 +262,7 @@ export class BackgroundAnalysisProgram {
this._onAnalysisCompletion({
diagnostics: fileDiags,
filesInProgram: this._program.getFileCount(),
filesRequiringAnalysis: this._program.getFilesToAnalyzeCount(),
requiringAnalysisCount: this._program.getFilesToAnalyzeCount(),
checkingOnlyOpenFiles: this._program.isCheckingOnlyOpenFiles(),
fatalErrorOccurred: false,
configParseErrorOccurred: false,
@@ -16,7 +16,7 @@ import { DiagnosticRule } from '../common/diagnosticRules';
import { LocMessage } from '../localization/localize';
import { ArgumentCategory, ExpressionNode, ParameterCategory } from '../parser/parseNodes';
import { createFunctionFromConstructor } from './constructors';
import { getParameterListDetails, ParameterSource } from './parameterUtils';
import { getParameterListDetails, ParameterKind } from './parameterUtils';
import { Symbol, SymbolFlags } from './symbol';
import { FunctionArgument, FunctionResult, TypeEvaluator } from './typeEvaluatorTypes';
import {
@@ -246,7 +246,7 @@ function applyPartialTransformToFunction(
// Does this positional argument map to a positional parameter?
if (
argIndex >= paramListDetails.params.length ||
paramListDetails.params[argIndex].source === ParameterSource.KeywordOnly
paramListDetails.params[argIndex].kind === ParameterKind.Keyword
) {
if (paramListDetails.argsIndex !== undefined) {
const paramType = FunctionType.getEffectiveParameterType(
@@ -329,8 +329,7 @@ function applyPartialTransformToFunction(
}
} else {
const matchingParam = paramListDetails.params.find(
(paramInfo) =>
paramInfo.param.name === arg.name?.value && paramInfo.source !== ParameterSource.PositionOnly
(paramInfo) => paramInfo.param.name === arg.name?.value && paramInfo.kind !== ParameterKind.Positional
);

if (!matchingParam) {
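
For context on the `ParameterSource` to `ParameterKind` rename above (an editorial sketch, not part of the commit): this code classifies parameters while checking `functools.partial` calls, and the situations it distinguishes look roughly like this in Python:

```python
# Hypothetical illustration of the parameter kinds (positional-only vs.
# keyword-capable) that the renamed enum values distinguish when pyright
# checks functools.partial.
from functools import partial


def connect(host: str, /, port: int = 80, *, timeout: float = 1.0) -> str:
    return f"{host}:{port} (timeout={timeout})"


by_position = partial(connect, "localhost", 8080)  # positional args map to positional params
by_keyword = partial(connect, timeout=2.0)         # keyword arg maps to a keyword-capable param

# A keyword argument naming the positional-only parameter has no matching
# non-positional parameter, which is the case the matchingParam lookup above rejects:
# bad = partial(connect, host="localhost")

print(by_position(), by_keyword("example.org"))
```
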
5 changes: 3 additions & 2 deletions packages/pyright-internal/src/analyzer/enums.ts
@@ -587,9 +587,10 @@ export function getTypeOfEnumMember(

if (memberName === 'value' || memberName === '_value_') {
// Does the class explicitly override this member? Or is it using the
// standard behavior provided by the "Enum" class?
// standard behavior provided by the "Enum" class and other built-in
// subclasses like "StrEnum" and "IntEnum"?
const memberInfo = lookUpClassMember(classType, memberName);
if (memberInfo && isClass(memberInfo.classType) && !ClassType.isBuiltIn(memberInfo.classType, 'Enum')) {
if (memberInfo && isClass(memberInfo.classType) && !ClassType.isBuiltIn(memberInfo.classType)) {
return undefined;
}
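
One more reader's note (not part of the commit): the enums.ts tweak above broadens the check from the `Enum` class specifically to any built-in class, so `value` members inherited from built-in subclasses such as `StrEnum` and `IntEnum` are still treated as standard enum machinery. A hypothetical Python case it appears aimed at:

```python
# Hypothetical example of the lookup affected by the enums.ts change above.
from enum import IntEnum


class Priority(IntEnum):
    LOW = 1
    HIGH = 2


# Priority does not override `value` itself; the attribute comes from the
# built-in enum classes. With the broadened isBuiltIn() check, pyright should
# keep using its synthesized, member-specific value type for this access
# instead of falling back to the attribute declared on the base class.
highest = Priority.HIGH.value
print(highest)  # 2 at runtime
```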
