from _typeshed import Incomplete
from argparse import Namespace
from ast import AST
from collections.abc import Generator
from logging import Logger
from tokenize import TokenInfo
from typing import Any, Final
from typing_extensions import TypeAlias

from .plugins.finder import LoadedPlugin

# Module-level logger for the processor module.
LOG: Logger
# Token-type id sets (frozensets of ints from the `token` module, e.g.
# NEWLINE/NL) — presumably used to classify tokens while building logical
# lines; exact membership is defined in the implementation, not visible here.
NEWLINE: Final[frozenset[int]]
SKIP_TOKENS: Final[frozenset[int]]

# Maps offsets in a logical line back to (row, column) positions in the
# physical source: a list of (logical_offset, (line, col)) pairs.
_LogicalMapping: TypeAlias = list[tuple[int, tuple[int, int]]]
# A built logical line: (comments, logical-line pieces, offset mapping) —
# see FileProcessor.build_logical_line_tokens below.
_Logical: TypeAlias = tuple[list[str], list[str], _LogicalMapping]

class FileProcessor:
    """Type stub for flake8's per-file processor.

    Tracks the lines, tokens, and incremental state (blank-line counts,
    indentation, logical/physical line bookkeeping) for a single file as
    checks are run over it.  All attribute semantics below are inferred
    from names and the visible signatures — confirm against the flake8
    implementation before relying on them.
    """

    # Whether noqa handling applies to this file — TODO confirm exact semantics.
    noqa: bool
    # Presumably the argparse Namespace passed to __init__.
    options: Incomplete
    # Presumably the filename str passed to __init__.
    filename: Incomplete
    # Presumably the file's physical lines (list[str]) — see read_lines().
    lines: Incomplete
    # Blank-line counters used by pycodestyle-style checks.
    blank_before: int
    blank_lines: int
    # Mutable per-plugin state dict — see update_checker_state_for().
    checker_state: Incomplete
    # Mirrors of option values — presumably copied from `options` in __init__.
    hang_closing: Incomplete
    indent_char: Incomplete
    indent_level: int
    indent_size: Incomplete
    # Position/state of the line currently being processed.
    line_number: int
    logical_line: str
    max_line_length: Incomplete
    max_doc_length: Incomplete
    # True while inside a multiline string — see multiline_string().
    multiline: bool
    # History of previously processed logical lines.
    previous_indent_level: int
    previous_logical: str
    previous_unindented_logical_line: str
    # Tokens accumulated for the current logical line — TODO confirm.
    tokens: Incomplete
    total_lines: Incomplete
    verbose: Incomplete
    statistics: Incomplete

    def __init__(self, filename: str, options: Namespace, lines: list[str] | None = None) -> None: ...
    @property
    def file_tokens(self) -> list[TokenInfo]: ...
    # Handlers invoked when a t-string / f-string start token is seen.
    def tstring_start(self, lineno: int) -> None: ...
    def fstring_start(self, lineno: int) -> None: ...
    # Yields the physical lines of a multiline string token.
    def multiline_string(self, token: TokenInfo) -> Generator[str, None, None]: ...
    def reset_blank_before(self) -> None: ...
    def delete_first_token(self) -> None: ...
    def visited_new_blank_line(self) -> None: ...
    def update_state(self, mapping: _LogicalMapping) -> None: ...
    def update_checker_state_for(self, plugin: LoadedPlugin) -> None: ...
    def next_logical_line(self) -> None: ...
    def build_logical_line_tokens(self) -> _Logical: ...
    def build_ast(self) -> AST: ...
    # Returns (joined comments, logical line, offset mapping) — TODO confirm.
    def build_logical_line(self) -> tuple[str, str, _LogicalMapping]: ...
    # Filters `arguments` down to the keys a plugin declares in `parameters`.
    def keyword_arguments_for(self, parameters: dict[str, bool], arguments: dict[str, Any]) -> dict[str, Any]: ...
    def generate_tokens(self) -> Generator[TokenInfo, None, None]: ...
    # Returns the physical line a noqa comment applies to, or None.
    def noqa_line_for(self, line_number: int) -> str | None: ...
    def next_line(self) -> str: ...
    # Line-reading helpers: from the filename, or stdin for "-".
    def read_lines(self) -> list[str]: ...
    def read_lines_from_filename(self) -> list[str]: ...
    def read_lines_from_stdin(self) -> list[str]: ...
    def should_ignore_file(self) -> bool: ...
    # Strips a UTF byte-order mark from the first line in place.
    def strip_utf_bom(self) -> None: ...

def is_eol_token(token: TokenInfo) -> bool: ...
def is_multiline_string(token: TokenInfo) -> bool: ...
def token_is_newline(token: TokenInfo) -> bool: ...
def count_parentheses(current_parentheses_count: int, token_text: str) -> int: ...
def expand_indent(line: str) -> int: ...
def mutate_string(text: str) -> str: ...
