diff --git a/bot/.pylintrc b/bot/.pylintrc deleted file mode 100644 index 0f8a95ad4..000000000 --- a/bot/.pylintrc +++ /dev/null @@ -1,661 +0,0 @@ -[MASTER] - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. -extension-pkg-whitelist=markdown_html_finder - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use. -jobs=1 - -# Control the amount of potential inferred values when inferring a single -# object. This can help the performance when dealing with large functions or -# complex, nested conditions. -limit-inference-results=100 - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# Specify a configuration file. -#rcfile= - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages. -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. -confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". 
-disable=print-statement, - parameter-unpacking, - unpacking-in-except, - old-raise-syntax, - backtick, - long-suffix, - old-ne-operator, - old-octal-literal, - import-star-module-level, - non-ascii-bytes-literal, - raw-checker-failed, - bad-inline-option, - locally-disabled, - file-ignored, - suppressed-message, - useless-suppression, - deprecated-pragma, - use-symbolic-message-instead, - apply-builtin, - basestring-builtin, - buffer-builtin, - cmp-builtin, - coerce-builtin, - execfile-builtin, - file-builtin, - long-builtin, - raw_input-builtin, - reduce-builtin, - standarderror-builtin, - unicode-builtin, - xrange-builtin, - coerce-method, - delslice-method, - getslice-method, - setslice-method, - no-absolute-import, - old-division, - dict-iter-method, - dict-view-method, - next-method-called, - metaclass-assignment, - indexing-exception, - raising-string, - reload-builtin, - oct-method, - hex-method, - nonzero-method, - cmp-method, - input-builtin, - round-builtin, - intern-builtin, - unichr-builtin, - map-builtin-not-iterating, - zip-builtin-not-iterating, - range-builtin-not-iterating, - filter-builtin-not-iterating, - using-cmp-argument, - eq-without-hash, - div-method, - idiv-method, - rdiv-method, - exception-message-attribute, - invalid-str-codec, - sys-max-int, - bad-python3-import, - deprecated-string-function, - deprecated-str-translate-call, - deprecated-itertools-function, - deprecated-types-field, - next-method-defined, - dict-items-not-iterating, - dict-keys-not-iterating, - dict-values-not-iterating, - deprecated-operator-function, - deprecated-urllib-function, - xreadlines-attribute, - deprecated-sys-function, - exception-escape, - comprehension-escape, - - # recipeyak - too-few-public-methods, - invalid-name, - no-member, - fixme, - line-too-long, - missing-docstring, - bad-continuation, - abstract-method, - unused-argument, - redefined-outer-name, - wrong-import-order, - no-self-use, - too-many-arguments, - too-many-return-statements, - too-many-ancestors, - too-many-branches, - too-many-lines, - arguments-differ, - duplicate-code, - invalid-envvar-default, - # see: https://docs.djangoproject.com/en/2.1/ref/contrib/postgres/operations/#createextension - super-init-not-called, - ungrouped-imports, - logging-too-many-args, - # doesn't handle python 3.7 scoping for type annotations - used-before-assignment, - # doesn't handle python 3.7 scoping for type annotations - undefined-variable, - # style is handled by formatter - trailing-newlines, - trailing-whitespace, - bad-whitespace, - bad-indentation, - # kodiak - too-many-locals, - no-method-argument, - too-many-statements, - too-many-instance-attributes, - # inaccurate - c-extension-no-member, - blacklisted-name, - redefined-builtin, - len-as-condition, - useless-return, - protected-access, - no-self-argument, - no-name-in-module,, - # handled by flake8 - unused-import, - unused-variable, - - # kodiak - # doesn't seem to work 100%. - cyclic-import, - # black handles this - multiple-statements, - # doesn't understand .pyi files - function-redefined, - # can't find imports in .pyi files - import-error, - # mypy already does this - signature-differs, - # doesn't understand annotations with builtin dict - unsubscriptable-object, - # doesn't handle .pyi files - useless-import-alias, - # conflicts with type stubs for structlog - keyword-arg-before-vararg, - # not smart enough about refinement - inconsistent-return-statements, - # we place imports in functions to prevent circular refs. 
- import-outside-toplevel, - # it's okay to use dict() - use-dict-literal, - # the default encoding works fine - unspecified-encoding, - # we don't need to specify `from` - raise-missing-from, - # this has false positives - method-hidden, - # flake8 handles this now - broad-except, - # we can format strings as we like - consider-using-f-string, - # mypy covers this and doesn't give false positives, - unexpected-keyword-arg, - - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=logging-not-lazy, - logging-format-interpolation, - logging-fstring-interpolation, - - -[REPORTS] - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=no - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit - - -[BASIC] - -# Naming style matching correct argument names. -argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style. -#argument-rgx= - -# Naming style matching correct attribute names. -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style. -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma. -bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Naming style matching correct class attribute names. -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. -#class-attribute-rgx= - -# Naming style matching correct class names. -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming- -# style. -#class-rgx= - -# Naming style matching correct constant names. -const-naming-style=UPPER_CASE - -# Regular expression matching correct constant names. Overrides const-naming- -# style. -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names. -function-naming-style=snake_case - -# Regular expression matching correct function names. 
Overrides function- -# naming-style. -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma. -good-names=i, - j, - k, - ex, - Run, - _ - -# Include a hint for the correct naming format with invalid-name. -include-naming-hint=no - -# Naming style matching correct inline iteration names. -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. -#inlinevar-rgx= - -# Naming style matching correct method names. -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style. -#method-rgx= - -# Naming style matching correct module names. -module-naming-style=snake_case - -# Regular expression matching correct module names. Overrides module-naming- -# style. -#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -# These decorators are taken in consideration only for invalid-name. -property-classes=abc.abstractproperty - -# Naming style matching correct variable names. -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style. -#variable-rgx= - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=100 - -# Maximum number of lines in a module. -max-module-lines=1000 - -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma, - dict-separator - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[LOGGING] - -# Format style used to check logging format string. `old` means using % -# formatting, while `new` is for `{}` formatting. -logging-format-style=old - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. 
-max-spelling-suggestions=4 - -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package.. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. -spelling-store-unknown-words=no - - -[STRING] - -# This flag controls whether the implicit-str-concat-in-sequence should -# generate a warning on implicit string concatenation in sequences defined over -# several lines. -check-str-concat-over-line-jumps=no - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid defining new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expected to -# not be used). 
-dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore. -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=cls - - -[DESIGN] - -# Maximum number of arguments for function / method. -max-args=5 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Maximum number of boolean expressions in an if statement. -max-bool-expr=5 - -# Maximum number of branch for function / method body. -max-branches=12 - -# Maximum number of locals for function / method body. -max-locals=15 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body. -max-returns=6 - -# Maximum number of statements in function / method body. -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - - -[IMPORTS] - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules=optparse,tkinter.tix - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled). -ext-import-graph= - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled). -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "BaseException, Exception". 
-overgeneral-exceptions=BaseException, - Exception diff --git a/bot/.vscode/settings.json b/bot/.vscode/settings.json index fb8446766..fb5923fd5 100644 --- a/bot/.vscode/settings.json +++ b/bot/.vscode/settings.json @@ -9,6 +9,9 @@ "python.testing.pytestArgs": ["kodiak"], "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true, - "python.linting.flake8Enabled": true, - "python.linting.pylintEnabled": true + "python.linting.flake8Enabled": false, + "evenBetterToml.schema.enabled": false, + "[toml]": { + "editor.formatOnSave": false + } } diff --git a/bot/README.md b/bot/README.md index d31e584ec..cf328679e 100644 --- a/bot/README.md +++ b/bot/README.md @@ -13,7 +13,7 @@ The follow shows how to run commands for testing and development. For informatio poetry config virtualenvs.in-project true poetry install -# format and lint using black, isort, mypy, flake8, pylint +# format and lint s/lint # run tests using pytest @@ -36,6 +36,7 @@ s/dev-workers ``` If you have made any changes concerning the config, run the following command to update the schema: + ```shell poetry run kodiak gen-conf-json-schema > kodiak/test/fixtures/config/config-schema.json ``` diff --git a/bot/kodiak/cli.py b/bot/kodiak/cli.py index d8f49135c..a72657ec9 100644 --- a/bot/kodiak/cli.py +++ b/bot/kodiak/cli.py @@ -34,10 +34,10 @@ def list_installs() -> None: private_key=conf.PRIVATE_KEY, app_identifier=conf.GITHUB_APP_ID ) results: List[Dict[str, Any]] = [] - headers = dict( - Accept="application/vnd.github.machine-man-preview+json", - Authorization=f"Bearer {app_token}", - ) + headers = { + "Accept": "application/vnd.github.machine-man-preview+json", + "Authorization": f"Bearer {app_token}", + } url = conf.v3_url("/app/installations") while True: res = requests.get(url, headers=headers) diff --git a/bot/kodiak/config.py b/bot/kodiak/config.py index d803567c8..22d6b62e6 100644 --- a/bot/kodiak/config.py +++ b/bot/kodiak/config.py @@ -140,7 +140,7 @@ class Approve(BaseModel): auto_approve_labels: List[str] = [] -class InvalidVersion(ValueError): +class InvalidVersion(ValueError): # noqa: N818 pass diff --git a/bot/kodiak/entrypoints/ingest.py b/bot/kodiak/entrypoints/ingest.py index 3e675d0d0..b02990df9 100644 --- a/bot/kodiak/entrypoints/ingest.py +++ b/bot/kodiak/entrypoints/ingest.py @@ -35,7 +35,7 @@ async def get_redis() -> asyncio_redis.Pool: - global _redis # pylint: disable=global-statement + global _redis if _redis is None: _redis = await asyncio_redis.Pool.create( host=conf.REDIS_URL.hostname or "localhost", diff --git a/bot/kodiak/entrypoints/worker.py b/bot/kodiak/entrypoints/worker.py index 70708faa7..a0a1ffdf2 100644 --- a/bot/kodiak/entrypoints/worker.py +++ b/bot/kodiak/entrypoints/worker.py @@ -75,7 +75,7 @@ async def main() -> NoReturn: queue = RedisWebhookQueue() await queue.create() - ingest_workers = dict() + ingest_workers = {} redis = await redis_client.create_connection() ingest_queue_names = await redis.smembers(INGEST_QUEUE_NAMES) diff --git a/bot/kodiak/errors.py b/bot/kodiak/errors.py index 0c0d4c7ed..55ab47be8 100644 --- a/bot/kodiak/errors.py +++ b/bot/kodiak/errors.py @@ -1,12 +1,12 @@ -class RetryForSkippableChecks(Exception): +class RetryForSkippableChecks(Exception): # noqa: N818 pass -class PollForever(Exception): +class PollForever(Exception): # noqa: N818 pass -class ApiCallException(Exception): +class ApiCallException(Exception): # noqa: N818 def __init__(self, method: str, http_status_code: int, response: bytes) -> None: self.method = method self.status_code = 
http_status_code diff --git a/bot/kodiak/evaluation.py b/bot/kodiak/evaluation.py index f42d2a663..a7944bbb5 100644 --- a/bot/kodiak/evaluation.py +++ b/bot/kodiak/evaluation.py @@ -100,6 +100,7 @@ def get_body_content( if body_type is BodyText.html: return pull_request.bodyHTML assert_never(body_type) + return None EMPTY_STRING = "" diff --git a/bot/kodiak/events/__init__.py b/bot/kodiak/events/__init__.py index adb49ab1b..ab372a949 100644 --- a/bot/kodiak/events/__init__.py +++ b/bot/kodiak/events/__init__.py @@ -1,8 +1,8 @@ from kodiak.events.check_run import CheckRunEvent # noqa: F401 from kodiak.events.pull_request import PullRequestEvent # noqa: F401 from kodiak.events.pull_request_review import PullRequestReviewEvent # noqa: F401 -from kodiak.events.pull_request_review_thread import ( # noqa: F401 - PullRequestReviewThreadEvent, +from kodiak.events.pull_request_review_thread import ( + PullRequestReviewThreadEvent, # noqa: F401 ) from kodiak.events.push import PushEvent # noqa: F401 from kodiak.events.status import StatusEvent # noqa: F401 diff --git a/bot/kodiak/http.py b/bot/kodiak/http.py index cdfa514b5..b0076438a 100644 --- a/bot/kodiak/http.py +++ b/bot/kodiak/http.py @@ -2,15 +2,15 @@ import ssl -from httpx import ( # noqa: I251 +from httpx import ( # noqa: TID251 AsyncClient, HTTPError, HTTPStatusError, Request, Response, ) -from httpx._config import DEFAULT_TIMEOUT_CONFIG # noqa: I251 -from httpx._types import TimeoutTypes # noqa: I251 +from httpx._config import DEFAULT_TIMEOUT_CONFIG # noqa: TID251 +from httpx._types import TimeoutTypes # noqa: TID251 __all__ = ["Response", "Request", "HTTPError", "HttpClient", "HTTPStatusError"] diff --git a/bot/kodiak/pull_request.py b/bot/kodiak/pull_request.py index 120adb9d9..84bada28e 100644 --- a/bot/kodiak/pull_request.py +++ b/bot/kodiak/pull_request.py @@ -267,7 +267,7 @@ async def update_branch(self) -> None: except HTTPError: self.log.warning("failed to update branch", res=res, exc_info=True) # we raise an exception to retry this request. - raise ApiCallException( + raise ApiCallException( # noqa: B904 method="pull_request/update_branch", http_status_code=res.status_code, response=res.content, @@ -329,9 +329,9 @@ async def merge( "failed to merge pull request", res=res, exc_info=True ) if e.response is not None and e.response.status_code == 500: - raise GitHubApiInternalServerError + raise GitHubApiInternalServerError # noqa: B904 # we raise an exception to retry this request. - raise ApiCallException( + raise ApiCallException( # noqa: B904 method="pull_request/merge", http_status_code=res.status_code, response=res.content, @@ -351,7 +351,7 @@ async def update_ref(self, ref: str, sha: str) -> None: else: self.log.warning("failed to update ref", res=res, exc_info=True) # we raise an exception to retry this request. - raise ApiCallException( + raise ApiCallException( # noqa: B904 method="pull_request/update_ref", http_status_code=res.status_code, response=res.content, @@ -376,7 +376,7 @@ async def add_label(self, label: str) -> None: self.log.warning( "failed to add label", label=label, res=res, exc_info=True ) - raise ApiCallException( + raise ApiCallException( # noqa: B904 method="pull_request/add_label", http_status_code=res.status_code, response=res.content, @@ -398,7 +398,7 @@ async def remove_label(self, label: str) -> None: "failed to delete label", label=label, res=res, exc_info=True ) # we raise an exception to retry this request. 
- raise ApiCallException( + raise ApiCallException( # noqa: B904 method="pull_request/delete_label", http_status_code=res.status_code, response=res.content, diff --git a/bot/kodiak/queries/__init__.py b/bot/kodiak/queries/__init__.py index 8d30572da..1f3b6e129 100644 --- a/bot/kodiak/queries/__init__.py +++ b/bot/kodiak/queries/__init__.py @@ -18,9 +18,8 @@ from kodiak import http from kodiak.config import V1, MergeMethod from kodiak.http import HttpClient -from kodiak.queries.commits import Commit, CommitConnection, GitActor +from kodiak.queries.commits import Commit, CommitConnection, GitActor, get_commits from kodiak.queries.commits import User as PullRequestCommitUser -from kodiak.queries.commits import get_commits from kodiak.throttle import get_thottler_for_installation logger = structlog.get_logger() @@ -306,12 +305,12 @@ def get_event_info_query( } } -""" % dict( - requiresConversationResolution="requiresConversationResolution" +""" % { + "requiresConversationResolution": "requiresConversationResolution" if requires_conversation_resolution else "", - bodyHTMLQuery="bodyHTML" if fetch_body_html else "bodyHTML: body", - ) + "bodyHTMLQuery": "bodyHTML" if fetch_body_html else "bodyHTML: body", + } def get_org_config_default_branch(data: dict[Any, Any]) -> str | None: @@ -569,7 +568,7 @@ def expired(self) -> bool: return self.expires_at - timedelta(minutes=5) < datetime.now(timezone.utc) -installation_cache: MutableMapping[str, Optional[TokenResponse]] = dict() +installation_cache: MutableMapping[str, Optional[TokenResponse]] = {} # TODO(sbdchd): pass logging via TLS or async equivalent @@ -873,7 +872,7 @@ async def send_query( self.session.headers["Authorization"] = f"Bearer {token}" async with self.throttler: res = await self.session.post( - conf.GITHUB_V4_API_URL, json=(dict(query=query, variables=variables)) + conf.GITHUB_V4_API_URL, json=({"query": query, "variables": variables}) ) rate_limit_remaining = res.headers.get("x-ratelimit-remaining") rate_limit_max = res.headers.get("x-ratelimit-limit") @@ -897,7 +896,7 @@ async def get_api_features(self) -> ApiFeatures | None: first client to make an API request, we use their credentials to view schema metadata and cache the results. 
""" - global _api_features_cache # pylint: disable=global-statement + global _api_features_cache if _api_features_cache is not None: return _api_features_cache res = await self.send_query( @@ -910,7 +909,7 @@ async def get_api_features(self) -> ApiFeatures | None: } } """, - variables=dict(), + variables={}, installation_id=self.installation_id, ) if res is None: @@ -973,14 +972,14 @@ async def get_config_for_ref( ) res = await self.send_query( query=GET_CONFIG_QUERY, - variables=dict( - owner=self.owner, - repo=self.repo, - rootConfigFileExpression=repo_root_config_expression, - githubConfigFileExpression=repo_github_config_expression, - orgRootConfigFileExpression=org_root_config_expression, - orgGithubConfigFileExpression=org_github_config_file_expression, - ), + variables={ + "owner": self.owner, + "repo": self.repo, + "rootConfigFileExpression": repo_root_config_expression, + "githubConfigFileExpression": repo_github_config_expression, + "orgRootConfigFileExpression": org_root_config_expression, + "orgGithubConfigFileExpression": org_github_config_file_expression, + }, installation_id=self.installation_id, ) if res is None: @@ -1032,7 +1031,7 @@ async def get_event_info(self, pr_number: int) -> Optional[EventInfoResponse]: else True, fetch_body_html=True, ), - variables=dict(owner=self.owner, repo=self.repo, PRNumber=pr_number), + variables={"owner": self.owner, "repo": self.repo, "PRNumber": pr_number}, installation_id=self.installation_id, ) if res is None: @@ -1059,7 +1058,11 @@ async def get_event_info(self, pr_number: int) -> Optional[EventInfoResponse]: else True, fetch_body_html=False, ), - variables=dict(owner=self.owner, repo=self.repo, PRNumber=pr_number), + variables={ + "owner": self.owner, + "repo": self.repo, + "PRNumber": pr_number, + }, installation_id=self.installation_id, ) if res is None: @@ -1147,7 +1150,7 @@ async def get_open_pull_requests( headers = await get_headers( session=self.session, installation_id=self.installation_id ) - params = dict(state="open", sort="updated", per_page="100") + params = {"state": "open", "sort": "updated", "per_page": "100"} if base is not None: params["base"] = base if head is not None: @@ -1214,7 +1217,7 @@ async def approve_pull_request(self, *, pull_number: int) -> http.Response: headers = await get_headers( session=self.session, installation_id=self.installation_id ) - body = dict(event="APPROVE") + body = {"event": "APPROVE"} async with self.throttler: return await self.session.post( conf.v3_url( @@ -1239,7 +1242,7 @@ async def merge_pull_request( commit_title: Optional[str], commit_message: Optional[str], ) -> http.Response: - body = dict(merge_method=merge_method) + body = {"merge_method": merge_method} # we must not pass the keys for commit_title or commit_message when they # are null because GitHub will error saying the title/message cannot be # null. 
When the keys are not passed, GitHub creates a title and @@ -1264,7 +1267,7 @@ async def update_ref(self, *, ref: str, sha: str) -> http.Response: ) url = conf.v3_url(f"/repos/{self.owner}/{self.repo}/git/refs/heads/{ref}") async with self.throttler: - return await self.session.patch(url, headers=headers, json=dict(sha=sha)) + return await self.session.patch(url, headers=headers, json={"sha": sha}) async def create_notification( self, head_sha: str, message: str, summary: Optional[str] = None @@ -1273,14 +1276,14 @@ async def create_notification( session=self.session, installation_id=self.installation_id ) url = conf.v3_url(f"/repos/{self.owner}/{self.repo}/check-runs") - body = dict( - name=CHECK_RUN_NAME, - head_sha=head_sha, - status="completed", - completed_at=datetime.now(timezone.utc).isoformat(), - conclusion="neutral", - output=dict(title=message, summary=summary or ""), - ) + body = { + "name": CHECK_RUN_NAME, + "head_sha": head_sha, + "status": "completed", + "completed_at": datetime.now(timezone.utc).isoformat(), + "conclusion": "neutral", + "output": {"title": message, "summary": summary or ""}, + } async with self.throttler: return await self.session.post(url, headers=headers, json=body) @@ -1293,7 +1296,7 @@ async def add_label(self, label: str, pull_number: int) -> http.Response: conf.v3_url( f"/repos/{self.owner}/{self.repo}/issues/{pull_number}/labels" ), - json=dict(labels=[label]), + json={"labels": [label]}, headers=headers, ) @@ -1319,7 +1322,7 @@ async def create_comment(self, body: str, pull_number: int) -> http.Response: conf.v3_url( f"/repos/{self.owner}/{self.repo}/issues/{pull_number}/comments" ), - json=dict(body=body), + json={"body": body}, headers=headers, ) @@ -1372,9 +1375,11 @@ def generate_jwt(*, private_key: str, app_identifier: str) -> str: This is different from authenticating as an installation """ - issued_at = int(datetime.now().timestamp()) - expiration = int((datetime.now() + timedelta(minutes=9, seconds=30)).timestamp()) - payload = dict(iat=issued_at, exp=expiration, iss=app_identifier) + issued_at = int(datetime.now(timezone.utc).timestamp()) + expiration = int( + (datetime.now(timezone.utc) + timedelta(minutes=9, seconds=30)).timestamp() + ) + payload = {"iat": issued_at, "exp": expiration, "iss": app_identifier} return jwt.encode(payload=payload, key=private_key, algorithm="RS256").decode() @@ -1398,10 +1403,10 @@ async def get_token_for_install( async with throttler: res = await session.post( conf.v3_url(f"/app/installations/{installation_id}/access_tokens"), - headers=dict( - Accept="application/vnd.github.machine-man-preview+json", - Authorization=f"Bearer {app_token}", - ), + headers={ + "Accept": "application/vnd.github.machine-man-preview+json", + "Authorization": f"Bearer {app_token}", + }, ) if res.status_code > 300: raise Exception(f"Failed to get token, github response: {res.text}") @@ -1416,10 +1421,10 @@ async def get_headers( token = await get_token_for_install( session=session, installation_id=installation_id ) - return dict( - Authorization=f"token {token}", - Accept="application/vnd.github.machine-man-preview+json,application/vnd.github.antiope-preview+json,application/vnd.github.lydian-preview+json", - ) + return { + "Authorization": f"token {token}", + "Accept": "application/vnd.github.machine-man-preview+json,application/vnd.github.antiope-preview+json,application/vnd.github.lydian-preview+json", + } __all__ = ["Commit", "GitActor", "CommitConnection", "PullRequestCommitUser"] diff --git a/bot/kodiak/queue.py 
b/bot/kodiak/queue.py index 80fc7717a..319f2413f 100644 --- a/bot/kodiak/queue.py +++ b/bot/kodiak/queue.py @@ -216,7 +216,7 @@ async def push(queue: WebhookQueueProtocol, push_event: PushEvent) -> None: prs = await api_client.get_open_pull_requests(base=branch_name) if prs is None: log.info("api call to find pull requests failed") - return None + return for pr in prs: await queue.enqueue( event=WebhookEvent( @@ -233,7 +233,7 @@ async def push(queue: WebhookQueueProtocol, push_event: PushEvent) -> None: async def get_redis() -> asyncio_redis.Pool: - global _redis # pylint: disable=global-statement + global _redis if _redis is None: _redis = await asyncio_redis.Pool.create( host=conf.REDIS_URL.hostname or "localhost", @@ -264,7 +264,7 @@ async def handle_webhook_event( redis = await get_redis() await redis.rpush( b"kodiak:webhook_event", - [compress_payload(dict(event_name=event_name, payload=payload))], + [compress_payload({"event_name": event_name, "payload": payload})], ) await redis.ltrim(b"kodiak:webhook_event", 0, conf.USAGE_REPORTING_QUEUE_LENGTH) log = log.bind(usage_reported=True) diff --git a/bot/kodiak/refresh_pull_requests.py b/bot/kodiak/refresh_pull_requests.py index 69653aaa3..5763e18ce 100644 --- a/bot/kodiak/refresh_pull_requests.py +++ b/bot/kodiak/refresh_pull_requests.py @@ -112,10 +112,10 @@ async def get_login_for_install(*, http: HttpClient, installation_id: str) -> st ) res = await http.get( conf.v3_url(f"/app/installations/{installation_id}"), - headers=dict( - Accept="application/vnd.github.machine-man-preview+json", - Authorization=f"Bearer {app_token}", - ), + headers={ + "Accept": "application/vnd.github.machine-man-preview+json", + "Authorization": f"Bearer {app_token}", + }, ) res.raise_for_status() return cast(str, res.json()["account"]["login"]) @@ -131,8 +131,8 @@ async def refresh_pull_requests_for_installation( ) res = await http.post( conf.GITHUB_V4_API_URL, - json=dict(query=QUERY, variables=dict(login=login)), - headers=dict(Authorization=f"Bearer {token}"), + json={"query": QUERY, "variables": {"login": login}}, + headers={"Authorization": f"Bearer {token}"}, ) res.raise_for_status() diff --git a/bot/kodiak/test_evaluation.py b/bot/kodiak/test_evaluation.py index 7a74f1501..5594e97a7 100644 --- a/bot/kodiak/test_evaluation.py +++ b/bot/kodiak/test_evaluation.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime +from datetime import datetime, timezone from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple, Type, Union import pydantic @@ -67,47 +67,47 @@ def __repr__(self) -> str: class MockDequeue(BaseMockFunc): async def __call__(self) -> None: - self.log_call(dict()) + self.log_call({}) class MockSetStatus(BaseMockFunc): async def __call__( self, msg: str, *, markdown_content: Optional[str] = None ) -> None: - self.log_call(dict(msg=msg, markdown_content=markdown_content)) + self.log_call({"msg": msg, "markdown_content": markdown_content}) class MockPullRequestsForRef(BaseMockFunc): return_value: Optional[int] = 0 async def __call__(self, ref: str) -> Optional[int]: - self.log_call(dict(ref=ref)) + self.log_call({"ref": ref}) return self.return_value class MockDeleteBranch(BaseMockFunc): async def __call__(self, branch_name: str) -> None: - self.log_call(dict(branch_name=branch_name)) + self.log_call({"branch_name": branch_name}) class MockRemoveLabel(BaseMockFunc): async def __call__(self, label: str) -> None: - self.log_call(dict(label=label)) + self.log_call({"label": label}) class MockAddLabel(BaseMockFunc): async def 
__call__(self, label: str) -> None: - self.log_call(dict(label=label)) + self.log_call({"label": label}) class MockCreateComment(BaseMockFunc): async def __call__(self, body: str) -> None: - self.log_call(dict(body=body)) + self.log_call({"body": body}) class MockTriggerTestCommit(BaseMockFunc): async def __call__(self) -> None: - self.log_call(dict()) + self.log_call({}) class MockMerge(BaseMockFunc): @@ -120,11 +120,11 @@ async def __call__( commit_message: Optional[str], ) -> None: self.log_call( - dict( - merge_method=merge_method, - commit_title=commit_title, - commit_message=commit_message, - ) + { + "merge_method": merge_method, + "commit_title": commit_title, + "commit_message": commit_message, + } ) if self.raises is not None: raise self.raises @@ -132,7 +132,7 @@ async def __call__( class MockUpdateRef(BaseMockFunc): async def __call__(self, *, ref: str, sha: str) -> None: - self.log_call(dict(ref=ref, sha=sha)) + self.log_call({"ref": ref, "sha": sha}) class MockQueueForMerge(BaseMockFunc): @@ -141,23 +141,23 @@ class MockQueueForMerge(BaseMockFunc): return_value: Optional[int] = 3 async def __call__(self, *, first: bool) -> Optional[int]: - self.log_call(dict(first=first)) + self.log_call({"first": first}) return self.return_value class MockUpdateBranch(BaseMockFunc): async def __call__(self) -> None: - self.log_call(dict()) + self.log_call({}) class MockApprovePullRequest(BaseMockFunc): async def __call__(self) -> None: - self.log_call(dict()) + self.log_call({}) class MockRequeue(BaseMockFunc): async def __call__(self) -> None: - self.log_call(dict()) + self.log_call({}) class MockPrApi: @@ -268,7 +268,7 @@ def create_branch_protection() -> BranchProtectionRule: def create_review() -> PRReview: return PRReview( state=PRReviewState.APPROVED, - createdAt=datetime(2015, 5, 25), + createdAt=datetime(2015, 5, 25, tzinfo=timezone.utc), author=PRReviewAuthor(login="ghost"), ) @@ -343,7 +343,6 @@ async def __call__( def create_mergeable() -> MergeableType: - # pylint: disable=dangerous-default-value async def mergeable( *, api: PRAPI = create_api(), @@ -352,12 +351,12 @@ async def mergeable( config_path: str = create_config_path(), pull_request: PullRequest = create_pull_request(), branch_protection: Optional[BranchProtectionRule] = create_branch_protection(), - review_requests: List[PRReviewRequest] = [], - bot_reviews: List[PRReview] = [create_review()], - contexts: List[StatusContext] = [create_context()], - check_runs: List[CheckRun] = [create_check_run()], - commits: List[Commit] = [], - valid_merge_methods: List[MergeMethod] = [ + review_requests: List[PRReviewRequest] = [], # noqa: B006 + bot_reviews: List[PRReview] = [create_review()], # noqa: B006 + contexts: List[StatusContext] = [create_context()], # noqa: B006 + check_runs: List[CheckRun] = [create_check_run()], # noqa: B006 + commits: List[Commit] = [], # noqa: B006 + valid_merge_methods: List[MergeMethod] = [ # noqa: B006 MergeMethod.merge, MergeMethod.squash, MergeMethod.rebase, @@ -398,7 +397,6 @@ async def mergeable( app_id=app_id, ) - # pylint: enable=dangerous-default-value return mergeable @@ -1740,7 +1738,7 @@ async def test_mergeable_api_call_retry_timeout() -> None: api_call_error = APICallError( api_name="pull_request/merge", http_status="405", - response_body=str( + response_body=str( # noqa: UP018 b'{"message":"This branch can\'t be rebased","documentation_url":"https://developer.github.com/v3/pulls/#merge-a-pull-request-merge-button"}' ), ) diff --git a/bot/kodiak/test_logging.py 
b/bot/kodiak/test_logging.py index 929be9f09..067c98bd4 100644 --- a/bot/kodiak/test_logging.py +++ b/bot/kodiak/test_logging.py @@ -158,7 +158,7 @@ def test_get_logging_level(level: str, expected: int) -> None: def test_add_request_info_processor() -> None: url = "https://api.example.com/v1/me" - payload = dict(user_id=54321) + payload = {"user_id": 54321} req = Request("POST", url, json=payload) res = Response() res.status_code = 500 @@ -169,7 +169,7 @@ def test_add_request_info_processor() -> None: )._content = b"Your request could not be completed due to an internal error." res.request = cast(PreparedRequest, req.prepare()) # type: ignore event_dict = add_request_info_processor( - None, None, dict(event="request failed", res=res) + None, None, {"event": "request failed", "res": res} ) assert event_dict["response_content"] == cast(Any, res)._content assert event_dict["response_status_code"] == res.status_code diff --git a/bot/kodiak/test_pull_request.py b/bot/kodiak/test_pull_request.py index b46be3b07..f3daa2666 100644 --- a/bot/kodiak/test_pull_request.py +++ b/bot/kodiak/test_pull_request.py @@ -124,12 +124,12 @@ async def __call__( self, number: int, merge_method: str, commit_title: str, commit_message: str ) -> requests.Response: self.log_call( - dict( - number=number, - merge_method=merge_method, - commit_title=commit_title, - commit_message=commit_message, - ) + { + "number": number, + "merge_method": merge_method, + "commit_title": commit_title, + "commit_message": commit_message, + } ) return self.response @@ -138,7 +138,7 @@ class MockDeleteLabel(BaseMockFunc): response: requests.Response async def __call__(self, label: str, pull_number: int) -> requests.Response: - self.log_call(dict(label=label, pull_number=pull_number)) + self.log_call({"label": label, "pull_number": pull_number}) return self.response @@ -146,7 +146,7 @@ class MockAddLabel(BaseMockFunc): response: requests.Response async def __call__(self, label: str, pull_number: int) -> requests.Response: - self.log_call(dict(label=label, pull_number=pull_number)) + self.log_call({"label": label, "pull_number": pull_number}) return self.response @@ -154,7 +154,7 @@ class MockUpdateBranch(BaseMockFunc): response: requests.Response async def __call__(self, pull_number: int) -> requests.Response: - self.log_call(dict(pull_number=pull_number)) + self.log_call({"pull_number": pull_number}) return self.response @@ -162,7 +162,7 @@ class MockUpdateRef(BaseMockFunc): response: requests.Response async def __call__(self, *, ref: str, sha: str) -> requests.Response: - self.log_call(dict(ref=ref, sha=sha)) + self.log_call({"ref": ref, "sha": sha}) return self.response @@ -396,9 +396,10 @@ async def test_update_ref_ok() -> None: pr_v2 = create_prv2(client=client) await pr_v2.update_ref(ref="master", sha="aa218f56b14c9653891f9e74264a383fa43fefbd") assert client.update_ref.call_count == 1 - assert client.update_ref.calls[0] == dict( - ref="master", sha="aa218f56b14c9653891f9e74264a383fa43fefbd" - ) + assert client.update_ref.calls[0] == { + "ref": "master", + "sha": "aa218f56b14c9653891f9e74264a383fa43fefbd", + } async def test_update_ref_service_unavailable() -> None: @@ -415,9 +416,10 @@ async def test_update_ref_service_unavailable() -> None: ref="master", sha="aa218f56b14c9653891f9e74264a383fa43fefbd" ) assert client.update_ref.call_count == 1 - assert client.update_ref.calls[0] == dict( - ref="master", sha="aa218f56b14c9653891f9e74264a383fa43fefbd" - ) + assert client.update_ref.calls[0] == { + "ref": "master", + "sha": 
"aa218f56b14c9653891f9e74264a383fa43fefbd", + } assert e.value.method == "pull_request/update_ref" assert e.value.status_code == 503 assert b"Service Unavailable" in e.value.response diff --git a/bot/kodiak/test_queries.py b/bot/kodiak/test_queries.py index 0e780eae7..8818f63ef 100644 --- a/bot/kodiak/test_queries.py +++ b/bot/kodiak/test_queries.py @@ -71,7 +71,7 @@ async def test_get_config_for_ref_error( mocker.patch.object( api_client, "send_query", - return_value=wrap_future(dict(data=None, errors=[{"test": 123}])), + return_value=wrap_future({"data": None, "errors": [{"test": 123}]}), ) res = await api_client.get_config_for_ref(ref="main", org_repo_default_branch=None) @@ -88,16 +88,16 @@ async def test_get_config_for_ref_dot_github( api_client, "send_query", return_value=wrap_future( - dict( - data=dict( - repository=dict( - rootConfigFile=None, - githubConfigFile=dict( - text="# .github/.kodiak.toml\nversion = 1\nmerge.method = 'rebase'" - ), - ) - ) - ) + { + "data": { + "repository": { + "rootConfigFile": None, + "githubConfigFile": { + "text": "# .github/.kodiak.toml\nversion = 1\nmerge.method = 'rebase'" + }, + } + } + } ), ) @@ -395,10 +395,10 @@ async def test_get_event_info_no_latest_sha( ] -MOCK_HEADERS = dict( - Authorization="token some-json-web-token", - Accept="application/vnd.github.machine-man-preview+json,application/vnd.github.antiope-preview+json", -) +MOCK_HEADERS = { + "Authorization": "token some-json-web-token", + "Accept": "application/vnd.github.machine-man-preview+json,application/vnd.github.antiope-preview+json", +} @pytest.fixture @@ -443,7 +443,7 @@ async def asdict() -> Any: class FakeRedis: @staticmethod - async def hgetall(key: bytes) -> Any: + async def hgetall(key: bytes) -> Any: # noqa: ARG004 return FakeDictReply return FakeRedis diff --git a/bot/poetry.lock b/bot/poetry.lock index d1572d734..05103922e 100644 --- a/bot/poetry.lock +++ b/bot/poetry.lock @@ -38,20 +38,6 @@ typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [package.extras] tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"] -[[package]] -name = "astroid" -version = "2.8.0" -description = "An abstract syntax tree for Python with inference support." -category = "dev" -optional = false -python-versions = "~=3.6" - -[package.dependencies] -lazy-object-proxy = ">=1.4.0" -typed-ast = {version = ">=1.4.0,<1.5", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} -typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} -wrapt = ">=1.11,<1.13" - [[package]] name = "asyncio-redis" version = "0.1.0" @@ -236,53 +222,6 @@ python-versions = ">=3.7" [package.extras] test = ["pytest (>=6)"] -[[package]] -name = "flake8" -version = "3.8.4" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" - -[package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.6.0a1,<2.7.0" -pyflakes = ">=2.2.0,<2.3.0" - -[[package]] -name = "flake8-pie" -version = "0.7.1" -description = "A flake8 extension that implements misc. lints" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "flake8-pyi" -version = "20.10.0" -description = "A plugin for flake8 to enable linting .pyi files." 
-category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -attrs = "*" -flake8 = ">=3.2.1" -pyflakes = ">=2.1.1" - -[[package]] -name = "flake8-tidy-imports" -version = "4.8.0" -description = "A flake8 plugin that helps you write tidier imports." -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -flake8 = ">=3.8.0" -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} - [[package]] name = "greenlet" version = "1.1.1" @@ -421,20 +360,6 @@ parallel = ["ipyparallel"] qtconsole = ["qtconsole"] test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipykernel", "numpy (>=1.17)"] -[[package]] -name = "isort" -version = "4.3.21" -description = "A Python utility / library to sort Python imports." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.extras] -pipfile = ["pipreqs", "requirementslib"] -pyproject = ["toml"] -requirements = ["pipreqs", "pip-api"] -xdg_home = ["appdirs (>=1.4.0)"] - [[package]] name = "jedi" version = "0.18.0" @@ -463,14 +388,6 @@ cffi = ">=1.5.0" milksnake = "*" six = "*" -[[package]] -name = "lazy-object-proxy" -version = "1.6.0" -description = "A fast and thorough lazy object proxy." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" - [[package]] name = "markdown-html-finder" version = "0.2.3" @@ -498,14 +415,6 @@ python-versions = ">=3.5" [package.dependencies] traitlets = "*" -[[package]] -name = "mccabe" -version = "0.6.1" -description = "McCabe checker, plugin for flake8" -category = "dev" -optional = false -python-versions = "*" - [[package]] name = "milksnake" version = "0.1.5" @@ -639,14 +548,6 @@ category = "dev" optional = false python-versions = "*" -[[package]] -name = "pycodestyle" -version = "2.6.0" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - [[package]] name = "pycparser" version = "2.20" @@ -670,14 +571,6 @@ typing-extensions = ">=3.7.4.3" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] -[[package]] -name = "pyflakes" -version = "2.2.0" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - [[package]] name = "pygments" version = "2.10.0" @@ -699,23 +592,6 @@ crypto = ["cryptography (>=1.4)"] flake8 = ["flake8", "flake8-import-order", "pep8-naming"] test = ["pytest (>=4.0.1,<5.0.0)", "pytest-cov (>=2.6.0,<3.0.0)", "pytest-runner (>=4.2,<5.0.0)"] -[[package]] -name = "pylint" -version = "2.11.1" -description = "python code static checker" -category = "dev" -optional = false -python-versions = "~=3.6" - -[package.dependencies] -astroid = ">=2.8.0,<2.9" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -isort = ">=4.2.5,<6" -mccabe = ">=0.6,<0.7" -platformdirs = ">=2.2.0" -toml = ">=0.7.1" -typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} - [[package]] name = "pyparsing" version = "2.4.7" @@ -830,6 +706,14 @@ idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} [package.extras] idna2008 = ["idna"] +[[package]] +name = "ruff" +version = "0.0.209" +description = "An extremely fast Python linter, written in Rust." 
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
 [[package]]
 name = "sentry-sdk"
 version = "1.5.12"
@@ -1048,14 +932,6 @@ category = "dev"
 optional = false
 python-versions = "*"
 
-[[package]]
-name = "wrapt"
-version = "1.12.1"
-description = "Module for decorators, wrappers and monkey patching."
-category = "dev"
-optional = false
-python-versions = "*"
-
 [[package]]
 name = "zipp"
 version = "3.5.0"
@@ -1079,7 +955,7 @@ python-versions = "*"
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.7"
-content-hash = "66e1d52ef5a34f66f3fd7f8fc8559ba8b503caba77489bfcdf5c1b80860e5131"
+content-hash = "cb507b5b98507a13d88cce036136e2c958f56ca4f3920b7062a6f2c3a1153fa4"
 
 [metadata.files]
 anyio = [
@@ -1094,10 +970,6 @@ asgiref = [
     {file = "asgiref-3.4.1-py3-none-any.whl", hash = "sha256:ffc141aa908e6f175673e7b1b3b7af4fdb0ecb738fc5c8b88f69f055c2415214"},
     {file = "asgiref-3.4.1.tar.gz", hash = "sha256:4ef1ab46b484e3c706329cedeff284a5d40824200638503f5768edb6de7d58e9"},
 ]
-astroid = [
-    {file = "astroid-2.8.0-py3-none-any.whl", hash = "sha256:dcc06f6165f415220013801642bd6c9808a02967070919c4b746c6864c205471"},
-    {file = "astroid-2.8.0.tar.gz", hash = "sha256:fe81f80c0b35264acb5653302ffbd935d394f1775c5e4487df745bf9c2442708"},
-]
 asyncio-redis = []
 attrs = [
     {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"},
@@ -1258,19 +1130,6 @@ decorator = [
     {file = "decorator-5.1.0.tar.gz", hash = "sha256:e59913af105b9860aa2c8d3272d9de5a56a4e608db9a2f167a8480b323d529a7"},
 ]
 exceptiongroup = []
-flake8 = [
-    {file = "flake8-3.8.4-py2.py3-none-any.whl", hash = "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839"},
-    {file = "flake8-3.8.4.tar.gz", hash = "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b"},
-]
-flake8-pie = [
-    {file = "flake8-pie-0.7.1.tar.gz", hash = "sha256:198baa85d85dea2bc42c28baa5978bc771fced814bac9a9688441efe65485711"},
-    {file = "flake8_pie-0.7.1-py3-none-any.whl", hash = "sha256:b75f7b0383a7c7b2660a86491d0cefd9d8c0eb47caae36064157f47b7c864a81"},
-]
-flake8-pyi = [
-    {file = "flake8-pyi-20.10.0.tar.gz", hash = "sha256:cee3b20a5123152c697870e7e800b60e3c95eb89e272a2b63d8cf55cafb0472c"},
-    {file = "flake8_pyi-20.10.0-py2.py3-none-any.whl", hash = "sha256:ff5dfc40bffa878f6ce95bcfd9a6ad14c44b85cbe99c4864e729301bf54267f0"},
-]
-flake8-tidy-imports = []
 greenlet = [
     {file = "greenlet-1.1.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:476ba9435afaead4382fbab8f1882f75e3fb2285c35c9285abb3dd30237f9142"},
     {file = "greenlet-1.1.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:44556302c0ab376e37939fd0058e1f0db2e769580d340fb03b01678d1ff25f68"},
@@ -1358,10 +1217,6 @@ ipython = [
     {file = "ipython-7.27.0-py3-none-any.whl", hash = "sha256:75b5e060a3417cf64f138e0bb78e58512742c57dc29db5a5058a2b1f0c10df02"},
     {file = "ipython-7.27.0.tar.gz", hash = "sha256:58b55ebfdfa260dad10d509702dc2857cb25ad82609506b070cf2d7b7df5af13"},
 ]
-isort = [
-    {file = "isort-4.3.21-py2.py3-none-any.whl", hash = "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"},
-    {file = "isort-4.3.21.tar.gz", hash = "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1"},
-]
 jedi = [
     {file = "jedi-0.18.0-py2.py3-none-any.whl", hash = "sha256:18456d83f65f400ab0c2d3319e48520420ef43b23a086fdc05dff34132f0fb93"},
     {file = "jedi-0.18.0.tar.gz", hash = "sha256:92550a404bad8afed881a137ec9a461fed49eca661414be45059329614ed0707"},
@@ -1371,30 +1226,6 @@ kodiak-rure = [
     {file = "kodiak_rure-0.2.2-py2.py3-none-macosx_12_0_universal2.whl", hash = "sha256:a8faf72d440d0e9ebf89bcdaf8ce273d562a9c0fa61959513c962a88d589e2cd"},
     {file = "kodiak_rure-0.2.2-py2.py3-none-manylinux1_x86_64.whl", hash = "sha256:21d1d55ddb33718a1f3261f6e033445f11cd039c4cb4b90039a096e5ea1ea563"},
 ]
-lazy-object-proxy = [
-    {file = "lazy-object-proxy-1.6.0.tar.gz", hash = "sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726"},
-    {file = "lazy_object_proxy-1.6.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b"},
-    {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win32.whl", hash = "sha256:ebfd274dcd5133e0afae738e6d9da4323c3eb021b3e13052d8cbd0e457b1256e"},
-    {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ed361bb83436f117f9917d282a456f9e5009ea12fd6de8742d1a4752c3017e93"},
-    {file = "lazy_object_proxy-1.6.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d900d949b707778696fdf01036f58c9876a0d8bfe116e8d220cfd4b15f14e741"},
-    {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5743a5ab42ae40caa8421b320ebf3a998f89c85cdc8376d6b2e00bd12bd1b587"},
-    {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:bf34e368e8dd976423396555078def5cfc3039ebc6fc06d1ae2c5a65eebbcde4"},
-    {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win32.whl", hash = "sha256:b579f8acbf2bdd9ea200b1d5dea36abd93cabf56cf626ab9c744a432e15c815f"},
-    {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:4f60460e9f1eb632584c9685bccea152f4ac2130e299784dbaf9fae9f49891b3"},
-    {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d7124f52f3bd259f510651450e18e0fd081ed82f3c08541dffc7b94b883aa981"},
-    {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:22ddd618cefe54305df49e4c069fa65715be4ad0e78e8d252a33debf00f6ede2"},
-    {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:9d397bf41caad3f489e10774667310d73cb9c4258e9aed94b9ec734b34b495fd"},
-    {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a5045889cc2729033b3e604d496c2b6f588c754f7a62027ad4437a7ecc4837"},
-    {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:17e0967ba374fc24141738c69736da90e94419338fd4c7c7bef01ee26b339653"},
-    {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:410283732af311b51b837894fa2f24f2c0039aa7f220135192b38fcc42bd43d3"},
-    {file = "lazy_object_proxy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:85fb7608121fd5621cc4377a8961d0b32ccf84a7285b4f1d21988b2eae2868e8"},
-    {file = "lazy_object_proxy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:d1c2676e3d840852a2de7c7d5d76407c772927addff8d742b9808fe0afccebdf"},
-    {file = "lazy_object_proxy-1.6.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b865b01a2e7f96db0c5d12cfea590f98d8c5ba64ad222300d93ce6ff9138bcad"},
-    {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4732c765372bd78a2d6b2150a6e99d00a78ec963375f236979c0626b97ed8e43"},
-    {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9698110e36e2df951c7c36b6729e96429c9c32b3331989ef19976592c5f3c77a"},
-    {file = "lazy_object_proxy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:1fee665d2638491f4d6e55bd483e15ef21f6c8c2095f235fef72601021e64f61"},
-    {file = "lazy_object_proxy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b"},
-]
 markdown-html-finder = [
     {file = "markdown_html_finder-0.2.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:45dc89c5d0c0cc79d00fe7befd3cbbdd23f7e238969f74d344e1226f5159f84c"},
     {file = "markdown_html_finder-0.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d84227ebe6a595e2ff473da4c8bc151cf1e113a97e62c0042ea62168c93334a4"},
@@ -1469,10 +1300,6 @@ matplotlib-inline = [
     {file = "matplotlib-inline-0.1.3.tar.gz", hash = "sha256:a04bfba22e0d1395479f866853ec1ee28eea1485c1d69a6faf00dc3e24ff34ee"},
     {file = "matplotlib_inline-0.1.3-py3-none-any.whl", hash = "sha256:aed605ba3b72462d64d475a21a9296f400a19c4f74a31b59103d2a99ffd5aa5c"},
 ]
-mccabe = [
-    {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
-    {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
-]
 milksnake = [
     {file = "milksnake-0.1.5-py2.py3-none-any.whl", hash = "sha256:550ca1fc4222724149ee5a933e6bb8347630c0ed023a2a97701ab94fa256f6b4"},
     {file = "milksnake-0.1.5.zip", hash = "sha256:dfcd43b78bcf93897a75eea1dadf71c848319f19451cff4f3f3a628a5abe1688"},
@@ -1542,10 +1369,6 @@ ptyprocess = [
     {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"},
     {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"},
 ]
-pycodestyle = [
-    {file = "pycodestyle-2.6.0-py2.py3-none-any.whl", hash = "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367"},
-    {file = "pycodestyle-2.6.0.tar.gz", hash = "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"},
-]
 pycparser = [
     {file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"},
     {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"},
@@ -1587,10 +1410,6 @@ pydantic = [
     {file = "pydantic-1.9.1-py3-none-any.whl", hash = "sha256:4988c0f13c42bfa9ddd2fe2f569c9d54646ce84adc5de84228cfe83396f3bd58"},
     {file = "pydantic-1.9.1.tar.gz", hash = "sha256:1ed987c3ff29fff7fd8c3ea3a3ea877ad310aae2ef9889a119e22d3f2db0691a"},
 ]
-pyflakes = [
-    {file = "pyflakes-2.2.0-py2.py3-none-any.whl", hash = "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92"},
-    {file = "pyflakes-2.2.0.tar.gz", hash = "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"},
-]
 pygments = [
     {file = "Pygments-2.10.0-py3-none-any.whl", hash = "sha256:b8e67fe6af78f492b3c4b3e2970c0624cbf08beb1e493b2c99b9fa1b67a20380"},
     {file = "Pygments-2.10.0.tar.gz", hash = "sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6"},
@@ -1599,10 +1418,6 @@ pyjwt = [
     {file = "PyJWT-1.7.1-py2.py3-none-any.whl", hash = "sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e"},
     {file = "PyJWT-1.7.1.tar.gz", hash = "sha256:8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96"},
 ]
-pylint = [
-    {file = "pylint-2.11.1-py3-none-any.whl", hash = "sha256:0f358e221c45cbd4dad2a1e4b883e75d28acdcccd29d40c76eb72b307269b126"},
-    {file = "pylint-2.11.1.tar.gz", hash = "sha256:2c9843fff1a88ca0ad98a256806c82c5a8f86086e7ccbdb93297d86c3f90c436"},
-]
 pyparsing = [
     {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
     {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
@@ -1668,6 +1483,7 @@ rfc3986 = [
     {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"},
     {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"},
 ]
+ruff = []
 sentry-sdk = [
     {file = "sentry-sdk-1.5.12.tar.gz", hash = "sha256:259535ba66933eacf85ab46524188c84dcb4c39f40348455ce15e2c0aca68863"},
     {file = "sentry_sdk-1.5.12-py2.py3-none-any.whl", hash = "sha256:778b53f0a6c83b1ee43d3b7886318ba86d975e686cb2c7906ccc35b334360be1"},
@@ -1793,9 +1609,6 @@ wcwidth = [
     {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},
     {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
 ]
-wrapt = [
-    {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"},
-]
 zipp = [
     {file = "zipp-3.5.0-py3-none-any.whl", hash = "sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3"},
     {file = "zipp-3.5.0.tar.gz", hash = "sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4"},
diff --git a/bot/pyproject.toml b/bot/pyproject.toml
index 679e27a9b..ff1c49ee3 100644
--- a/bot/pyproject.toml
+++ b/bot/pyproject.toml
@@ -37,28 +37,69 @@ mypy = "^0.960"
 ipdb = "^0.13.9"
 pytest-mock = "3.3.1"
 typing_extensions = "^3.7"
-pylint = "^2.3"
-flake8 = "3.8.4"
-flake8-pie = "0.7.1"
-isort = "^4.3"
 pytest-cov = "^2.10"
-flake8-pyi = "^20.10"
 types-requests = "^2.28.0"
 types-toml = "^0.10.7"
-flake8-tidy-imports = "^4.8.0"
 pytest-asyncio = "0.20.1"
 pytest = "^7.2.0"
+ruff = "^0.0.209"
 
 [tool.poetry.plugins."pytest11"]
 "pytest_plugin" = "pytest_plugin.plugin"
 
-[tool.isort]
-multi_line_output = 3
-include_trailing_comma = true
-force_grid_wrap = 0
-use_parentheses = true
-line_length = 88
-known_third_party = ["redis", "asyncio_redis"]
+[tool.ruff]
+select = [
+  "E",
+  "F",
+  "TID251",
+  "TID252",
+  "I001",
+  "T20",
+  "C4",
+  "UP",
+  "N",
+  "BLE",
+  "B",
+  "RET",
+  "SIM",
+  "ARG",
+  "DTZ",
+  "ERA",
+  "RUF",
+]
+src = ["kodiak", "typings"]
+ignore = [
+  "E501", # line length is handled by black
+  "ARG001", # pytest fixtures mess with this
+  "ARG002", # sometimes parent classes require params
+  "N806", # django migrations violate this a lot
+  "B008", # type.Argument violates this
+  "N815", # we mix and match casing in serializers
+  "ARG005", # monkey patching false positive with *args, and **kwargs
+  "N805", # false positive with pydantic
+  "N802", # DRF serializers mess with this
+  "RET505", # if, elif, else are fine with returns
+  "RET507", # if, elif, else with continue is also fine
+  "RET508", # if, elif, else with break is also fine
+]
+unfixable = [
+  "UP018" # see: https://github.com/charliermarsh/ruff/issues/1615
+]
+update-check = false
+target-version = "py37"
+
+[tool.ruff.pyupgrade]
+keep-runtime-typing = true
+
+[tool.ruff.isort]
+known-third-party = ["redis", "asyncio_redis", "structlog", "rure", "uvicorn"]
+known-first-party = ["kodiak"]
+
+[tool.ruff.flake8-tidy-imports]
+# Disallow all relative imports.
+ban-relative-imports = "all"
+[tool.ruff.flake8-tidy-imports.banned-api]
+"httpx".msg = "Use kodiak.http"
 
 [tool.pytest.ini_options]
 asyncio_mode = "auto"
diff --git a/bot/s/lint b/bot/s/lint
index 49bcd4153..869e91ff4 100755
--- a/bot/s/lint
+++ b/bot/s/lint
@@ -1,18 +1,14 @@
 #!/usr/bin/env bash
 set -ex
 
-# format code
+# format & lint code
 if [[ $CI ]]; then
   ./.venv/bin/black --check .
-  ./.venv/bin/isort --check-only
+  ./.venv/bin/ruff .
 else
   ./.venv/bin/black .
-  ./.venv/bin/isort -y
+  ./.venv/bin/ruff . --fix
 fi
 
 # type check code
 ./.venv/bin/mypy .
-
-# lint
-./.venv/bin/flake8 kodiak
-./.venv/bin/pylint --rcfile='.pylintrc' kodiak
diff --git a/bot/typings/asyncio_redis/connection.pyi b/bot/typings/asyncio_redis/connection.pyi
index de92e7c67..7bce325f7 100644
--- a/bot/typings/asyncio_redis/connection.pyi
+++ b/bot/typings/asyncio_redis/connection.pyi
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Union
 
 from asyncio_redis.encoders import BaseEncoder
 from asyncio_redis.protocol import Transaction
@@ -14,10 +14,10 @@ from asyncio_redis.replies import (
 )
 
 class Subscription:
-    async def subscribe(self, channels: List[_Key]) -> None: ...
-    async def unsubscribe(self, channels: List[_Key]) -> None: ...
-    async def psubscribe(self, channels: List[_Key]) -> None: ...
-    async def punsubscribe(self, channels: List[_Key]) -> None: ...
+    async def subscribe(self, channels: list[_Key]) -> None: ...
+    async def unsubscribe(self, channels: list[_Key]) -> None: ...
+    async def psubscribe(self, channels: list[_Key]) -> None: ...
+    async def punsubscribe(self, channels: list[_Key]) -> None: ...
     async def next_published(self) -> PubSubReply: ...
 
 _Key = Union[bytes, str]
@@ -29,34 +29,34 @@ class Connection:
         host: str = ...,
         port: int = ...,
         *,
-        password: Optional[Union[str, bytes]] = ...,
+        password: str | bytes | None = ...,
         db: int = ...,
-        encoder: Optional[BaseEncoder] = ...,
+        encoder: BaseEncoder | None = ...,
         auto_reconnect: bool = ...,
-        loop: Optional[Any] = ...,
+        loop: Any | None = ...,
         protocol_class: Any = ...,
-        ssl: Optional[bool] = ...,
+        ssl: bool | None = ...,
     ) -> Connection: ...
     def close(self) -> None: ...
     async def hgetall(self, key: _Key) -> DictReply: ...
     async def hset(self, key: _Key, field: _Key, value: _Key) -> int: ...
-    async def delete(self, keys: List[_Key]) -> int: ...
-    async def blpop(self, keys: List[_Key], timeout: int = ...) -> BlockingPopReply: ...
+    async def delete(self, keys: list[_Key]) -> int: ...
+    async def blpop(self, keys: list[_Key], timeout: int = ...) -> BlockingPopReply: ...
     async def bzpopmin(
-        self, keys: List[_Key], timeout: int = ...
+        self, keys: list[_Key], timeout: int = ...
     ) -> BlockingZPopReply: ...
-    async def get(self, key: _Key) -> Optional[str]: ...
-    async def rpush(self, key: _Key, values: List[_Key]) -> int: ...
+    async def get(self, key: _Key) -> str | None: ...
+    async def rpush(self, key: _Key, values: list[_Key]) -> int: ...
     async def ltrim(
         self, key: _Key, start: int = ..., stop: int = ...
     ) -> StatusReply: ...
-    async def sadd(self, key: _Key, values: List[_Key]) -> int: ...
+    async def sadd(self, key: _Key, values: list[_Key]) -> int: ...
     async def expire(self, key: _Key, seconds: int) -> int: ...
-    async def zrem(self, key: _Key, members: List[_Key]) -> int: ...
+    async def zrem(self, key: _Key, members: list[_Key]) -> int: ...
     async def zadd(
         self,
         key: _Key,
-        values: Dict[str, Any],
+        values: dict[str, Any],
         only_if_not_exists: bool = ...,
         only_if_exists: bool = ...,
         return_num_changed: bool = ...,
@@ -66,11 +66,11 @@ class Connection:
         self,
         key: _Key,
         value: _Key,
-        expire: Optional[int] = ...,
-        pexpire: Optional[int] = ...,
+        expire: int | None = ...,
+        pexpire: int | None = ...,
         only_if_not_exists: bool = ...,
         only_if_exists: bool = ...,
-    ) -> Optional[StatusReply]: ...
+    ) -> StatusReply | None: ...
     async def smembers(self, key: _Key) -> SetReply: ...
     async def start_subscribe(self) -> Subscription: ...
     async def multi(self) -> Transaction: ...
diff --git a/bot/typings/asyncio_redis/pool.pyi b/bot/typings/asyncio_redis/pool.pyi
index b69a7005d..e22b33df6 100644
--- a/bot/typings/asyncio_redis/pool.pyi
+++ b/bot/typings/asyncio_redis/pool.pyi
@@ -28,7 +28,8 @@ class Pool:
         loop: Optional[Any] = ...,
         protocol_class: Any = ...,
         ssl: Optional[bool] = ...,
-    ) -> Pool: ...
+        # false positive, see: https://github.com/charliermarsh/ruff/issues/1613
+    ) -> Pool: ...  # noqa: F821
     # NOTE(sbdchd): asyncio_redis does some hackery with __getattr__, so we copy
     # the methods from Connection
     def close(self) -> None: ...
diff --git a/bot/typings/structlog/__init__.pyi b/bot/typings/structlog/__init__.pyi
index 17f2daea0..56a407607 100644
--- a/bot/typings/structlog/__init__.pyi
+++ b/bot/typings/structlog/__init__.pyi
@@ -1,6 +1,6 @@
-from structlog import processors as processors  # noqa: F401
-from structlog import stdlib as stdlib  # noqa: F401
-from structlog._config import configure as configure  # noqa: F401
-from structlog._config import get_logger as get_logger  # noqa: F401
-from structlog._config import reset_defaults as reset_defaults  # noqa: F401
-from structlog.stdlib import BoundLogger as BoundLogger  # noqa: F401
+from structlog import processors as processors
+from structlog import stdlib as stdlib
+from structlog._config import configure as configure
+from structlog._config import get_logger as get_logger
+from structlog._config import reset_defaults as reset_defaults
+from structlog.stdlib import BoundLogger as BoundLogger
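For context, a minimal sketch (illustrative only, not part of the patch) of what the new ruff configuration in bot/pyproject.toml enforces in application code. The module and function names below are hypothetical; the rule codes, the banned `httpx` import with its "Use kodiak.http" message, and the annotation style all come from the configuration and stub changes above.

# Hypothetical example module -- illustrative only.
from __future__ import annotations  # lets the py37 target spell annotations as list[...] and X | None

# A bare `import httpx` here would be flagged by TID251 with the configured
# message "Use kodiak.http"; relative imports are banned wholesale via TID252
# and ban-relative-imports = "all".


def split_recipients(raw: str | None, sep: str = ",") -> list[str]:
    # The UP (pyupgrade) and I001 (isort) rules converge on this style:
    # builtin generics and `X | None` instead of typing.List / typing.Optional,
    # matching the rewritten asyncio_redis stubs.
    if raw is None:
        return []
    return [part.strip() for part in raw.split(sep) if part.strip()]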
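A second hedged sketch of why `keep-runtime-typing = true` is set even though the stubs above switch to the new syntax: code that is introspected at runtime (pydantic models, per the N805 comment) cannot use `int | None` on the py37 target, whereas .pyi stubs are never executed. The model and field below are hypothetical.

# Hypothetical example -- illustrative only.
from typing import Optional

import pydantic


class WebhookEvent(pydantic.BaseModel):
    # pydantic evaluates this annotation at runtime, and `int | None` raises a
    # TypeError on Python 3.7, so with keep-runtime-typing = true the UP rules
    # leave the typing.Optional spelling in place here while the stubs are free
    # to use the builtin-generic / `|` forms.
    installation_id: Optional[int] = None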