diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index 551addf..7a277f7 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -1,4 +1,4 @@ -name: pyright +name: Linting on: push: @@ -14,19 +14,19 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Install Poetry - run: pipx install poetry - - uses: actions/setup-python@v4 + - name: Set up Python + uses: actions/setup-python@v5 with: - cache: 'poetry' + python-version: "3.12" - name: Install dependencies run: | - poetry install - echo "$(poetry env info --path)/bin" >> $GITHUB_PATH + python -m pip install --upgrade pip + python -m pip install flit + flit install - name: Run Pylint - run: python -m pylint --rcfile=pyproject.toml + run: python -m pylint --rcfile=pyproject.toml src/ - name: Run pyright run: python -m pyright diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 15ea778..6a0b6b1 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -15,7 +15,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.12" - name: Install dependencies diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 63d1103..150e087 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -15,7 +15,7 @@ jobs: fail-fast: true matrix: os: [ubuntu-latest, windows-latest] - python: [3.8, 3.9, 3.10, 3.11, 3.12] + python: ["3.10", "3.11", "3.12"] name: ${{ matrix.python }} - ${{ matrix.os }} @@ -23,17 +23,15 @@ jobs: - name: Checkout code uses: actions/checkout@v4 - - name: Install Poetry - run: pipx install poetry - - - name: Setup Python - uses: actions/setup-python@v4 + - name: Set up Python + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} - cache: 'poetry' - - name: Install dependencies - run: poetry install + run: | + python -m pip install 
--upgrade pip + python -m pip install flit + flit install --deps develop - name: Execute tests run: python3 -m pytest diff --git a/.github/workflows/sphinx.yml b/.github/workflows/sphinx.yml index d8498e6..d83d3de 100644 --- a/.github/workflows/sphinx.yml +++ b/.github/workflows/sphinx.yml @@ -16,3 +16,5 @@ jobs: steps: - id: deployment uses: sphinx-notes/pages@v3 + with: + documentation_path: ./docs/source \ No newline at end of file diff --git a/.idea/python-package-template.iml b/.idea/python-package-template.iml index 4ac1f33..5c46c34 100644 --- a/.idea/python-package-template.iml +++ b/.idea/python-package-template.iml @@ -2,13 +2,14 @@ + + - - \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 46b0203..e6b23dd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,7 @@ # Changelog All notable changes to `sharkey-crawler` will be documented in this file. + +## v0.0.1 - 2024-07-08 + +**Full Changelog**: https://github.com/Hexafuchs/sharkey-crawler/commits/v0.0.1 diff --git a/README.md b/README.md index 01fcbd3..b905518 100644 --- a/README.md +++ b/README.md @@ -56,6 +56,15 @@ Checkout the docstring for more usage information. ## Development +### Installing flit + +```bash +python3 -m venv venv +./venv/bin/python -m pip install --upgrade pip +./venv/bin/python -m pip install flit +./venv/bin/flit install --only-deps --deps develop +``` + ### Installing new dependencies Either add the dependency to the optional dependencies, or create a new dependency within the `[project]` namespace, e.g.: @@ -72,8 +81,23 @@ Then, install dependencies with flit: ```bash ./venv/bin/flit install --only-deps --deps develop +# or: ./venv/bin/flit install --only-deps --deps all ``` +## Future Development + +You might be asking yourself why this project does not expose more endpoints. It could, and it likely will, but +currently, the endpoints are not well documented and it takes a lot of effort to even add a single endpoint, which +Sharkey has a lot of. 
Since Sharkey is not very old and the future is still unclear, I will not take the effort it +takes to support more endpoints until I have a use case for it or I see great demand. If you want more endpoints, +there are two recommended solutions for this: +* open a discussion, so I and possibly other developers can see which endpoints are requested a lot or have an interesting use case + * also, vote for endpoints you want to see added in the future +* contribute the endpoints yourself + +There might also be solutions to automate parts of the development like creating Pydantic classes. If you are interested +in this, feel free to contribute or open a discussion to organize this. + ## Changelog Please see [CHANGELOG](CHANGELOG.md) for more information on what has changed recently. diff --git a/docs/Makefile b/docs/Makefile index d4bb2cb..fa4c6fc 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -4,8 +4,8 @@ # You can set these variables from the command line, and also # from the environment for the first two. SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = . +SPHINXBUILD ?= ../venv/bin/sphinx-build +SOURCEDIR = source BUILDDIR = _build # Put it first so that "make" without argument is like "make help". diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index b10c570..0000000 --- a/docs/index.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. ai-python docs documentation master file, created by - sphinx-quickstart on Thu May 5 14:06:45 2022. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to ai-python docs's documentation! -========================================== - -.. 
toctree:: - :maxdepth: 2 - :caption: Contents: - modules - - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` diff --git a/docs/requirements.txt b/docs/requirements.txt index b75b86b..20f981c 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,3 +1,5 @@ sphinx sphinx-copybutton sphinx-rtd-theme +sphinx-autodoc-typehints +autodocsumm \ No newline at end of file diff --git a/docs/sharkey_crawler.main.rst b/docs/sharkey_crawler.main.rst deleted file mode 100644 index 0890562..0000000 --- a/docs/sharkey_crawler.main.rst +++ /dev/null @@ -1,21 +0,0 @@ -python\_package.hello\_world package -==================================== - -Submodules ----------- - -python\_package.hello\_world.hello\_world module ------------------------------------------------- - -.. automodule:: sharkey_crawler.main.hello_world - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. automodule:: sharkey_crawler.main - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/sharkey_crawler.rst b/docs/sharkey_crawler.rst deleted file mode 100644 index fa47038..0000000 --- a/docs/sharkey_crawler.rst +++ /dev/null @@ -1,29 +0,0 @@ -python\_package package -======================= - -Subpackages ------------ - -.. toctree:: - :maxdepth: 4 - - sharkey_crawler.main - -Submodules ----------- - -python\_package.setup module ----------------------------- - -.. automodule:: sharkey_crawler.setup - :members: - :undoc-members: - :show-inheritance: - -Module contents ---------------- - -.. 
automodule:: sharkey_crawler - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/conf.py b/docs/source/conf.py similarity index 78% rename from docs/conf.py rename to docs/source/conf.py index 20cb0f2..ea75e62 100644 --- a/docs/conf.py +++ b/docs/source/conf.py @@ -13,17 +13,18 @@ import os import sys -sys.path.insert(0, os.path.abspath("../src/")) +sys.path.insert(0, os.path.abspath("../../src/")) +import sharkey_crawler # -- Project information ----------------------------------------------------- -project = "sharkey_crawler docs" +project = "Sharkey Crawler Documentation" copyright = "2024, Hexafuchs" author = "Hexafuchs" # The full version, including alpha/beta/rc tags -release = "0.0.1" +release = sharkey_crawler.__version__ # -- General configuration --------------------------------------------------- @@ -36,8 +37,11 @@ "sphinx.ext.doctest", "sphinx.ext.intersphinx", "sphinx.ext.ifconfig", - "sphinx.ext.viewcode", # Add links to highlighted source code "sphinx.ext.napoleon", # to render Google format docstrings + "sphinx_autodoc_typehints", # Use type hints + "sphinx.ext.viewcode", # Add links to highlighted source code + "sphinx.ext.autosummary", + "autodocsumm", "sphinx.ext.githubpages", ] @@ -65,3 +69,17 @@ # Napoleon settings napoleon_include_init_with_doc = True napoleon_include_private_with_doc = True + +autodoc_typehints = "both" +autodoc_class_signature = "mixed" +autodoc_default_options = { + "members": True, + "member-order": "bysource", + "special-members": "__init__", + "undoc-members": True, + "show-inheritance": True, + "autosummary": True, +} +autodoc_type_aliases = {"SharkeyId": "SharkeyId"} + +autosummary_generate = True diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 0000000..299e7f1 --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,16 @@ +Welcome to Sharkey Crawler's documentation! +=========================================== + +.. 
toctree:: + :maxdepth: 2 + :caption: Contents: + + modules + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/modules.rst b/docs/source/modules.rst similarity index 100% rename from docs/modules.rst rename to docs/source/modules.rst diff --git a/docs/source/sharkey_crawler.rst b/docs/source/sharkey_crawler.rst new file mode 100644 index 0000000..2f7571f --- /dev/null +++ b/docs/source/sharkey_crawler.rst @@ -0,0 +1,15 @@ +sharkey\_crawler package +======================== + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + sharkey_crawler.types + +Module contents +--------------- + +.. automodule:: sharkey_crawler.main + :show-inheritance: + :imported-members: diff --git a/docs/source/sharkey_crawler.types.rst b/docs/source/sharkey_crawler.types.rst new file mode 100644 index 0000000..b12d516 --- /dev/null +++ b/docs/source/sharkey_crawler.types.rst @@ -0,0 +1,11 @@ +sharkey\_crawler.types package +============================== + +Module contents +--------------- + +.. 
automodule:: sharkey_crawler.types + :members: + :undoc-members: + :show-inheritance: + :imported-members: diff --git a/pyproject.toml b/pyproject.toml index c5cf94f..4e0e5c0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,32 +24,43 @@ requires-python = ">=3.10.0" dynamic = ["version"] dependencies = [ "requests==2.32.3", - "pydantic==2.8.2" + "pydantic==2.8.2", + "annotated-types==0.7.0" ] [project.optional-dependencies] -spark = [ - "pyspark>=3.0.0" -] test = [ "bandit[toml]==1.7.9", - "black==23.3.0", + "black==24.4.2", "check-manifest==0.49", - "flake8-bugbear==23.5.9", + "flake8-bugbear==24.4.26", "flake8-docstrings", "flake8-formatter_junit_xml", "flake8", "flake8-pyproject", "pre-commit==3.7.1", - "pylint==2.17.4", + "pylint==3.2.5", "pylint_junit", - "pytest-cov==4.0.0", + "pytest-cov==5.0.0", "pytest-mock<3.10.1", "pytest-runner", "pytest==8.2.2", "pytest-github-actions-annotate-failures", - "shellcheck-py==0.9.0.2" + "shellcheck-py==0.10.0.1", + "pyright", + "tox", +] +dev = [ + "bpython", + "pipdeptree" +] +doc = [ + "sphinx", + "sphinx-copybutton", + "sphinx-rtd-theme", + "sphinx-autodoc-typehints", + "autodocsumm" ] [project.urls] @@ -89,7 +100,10 @@ exclude = [ "packages", "pywin32", "tests", - "swagger_client" + "swagger_client", + ".github", + "venv", + "docs", ] ignore = [ "E722", @@ -104,12 +118,12 @@ exclude = [ "**/node_modules", "**/__pycache__", ] -venv = "env310" +venv = "venv" reportMissingImports = true reportMissingTypeStubs = false -pythonVersion = "3.10" +pythonVersion = "3.12" pythonPlatform = "Linux" executionEnvironments = [ @@ -125,9 +139,6 @@ testpaths = "tests" junit_family = "xunit2" markers = [ "integration: marks as integration test", - "notebooks: marks as notebook test", - "gpu: marks as gpu test", - "spark: marks tests which need Spark", "slow: marks tests as slow", "unit: fast offline tests", ] @@ -135,43 +146,32 @@ markers = [ [tool.tox] legacy_tox_ini = """ [tox] -envlist = py, integration, spark, all +envlist = 
pytest, pylint, pyright, flake8 -[testenv] +[testenv:pytest] +extras = test commands = - pytest -m "not integration and not spark" {posargs} + python -m pytest -[testenv:integration] +[testenv:pylint] +extras = test commands = - pytest -m "integration" {posargs} + python -m pylint --rcfile=pyproject.toml src/ -[testenv:spark] -extras = spark -setenv = - PYSPARK_DRIVER_PYTHON = {envpython} - PYSPARK_PYTHON = {envpython} +[testenv:pyright] +extras = test commands = - pytest -m "spark" {posargs} + python -m pyright -[testenv:all] -extras = all -setenv = - PYSPARK_DRIVER_PYTHON = {envpython} - PYSPARK_PYTHON = {envpython} +[testenv:flake8] +extras = test commands = - pytest {posargs} + python -m flake8 --toml-config=pyproject.toml """ [tool.pylint] extension-pkg-whitelist= [ - "numpy", - "torch", - "cv2", - "pyodbc", "pydantic", - "ciso8601", - "netcdf4", - "scipy" ] ignore="CVS" ignore-patterns="test.*?py,conftest.py" @@ -258,12 +258,12 @@ check-str-concat-over-line-jumps="no" [tool.pylint.'TYPECHECK'] contextmanager-decorators="contextlib.contextmanager" -generated-members="numpy.*,np.*,pyspark.sql.functions,collect_list" +generated-members="collect_list" ignore-mixin-members="yes" ignore-none="yes" ignore-on-opaque-inference="yes" -ignored-classes="optparse.Values,thread._local,_thread._local,numpy,torch,swagger_client" -ignored-modules="numpy,torch,swagger_client,netCDF4,scipy" +ignored-classes="optparse.Values,thread._local,_thread._local,swagger_client" +ignored-modules="swagger_client" missing-member-hint="yes" missing-member-hint-distance=1 missing-member-max-choices=1 @@ -298,16 +298,16 @@ valid-classmethod-first-arg="cls" valid-metaclass-classmethod-first-arg="cls" [tool.pylint.'DESIGN'] -max-args=5 -max-attributes=7 +max-args=30 +max-attributes=30 max-bool-expr=5 max-branches=12 -max-locals=15 +max-locals=30 max-parents=7 max-public-methods=20 max-returns=6 max-statements=50 -min-public-methods=2 +min-public-methods=1 [tool.pylint.'IMPORTS'] 
allow-wildcard-with-all="no" @@ -316,6 +316,6 @@ deprecated-modules="optparse,tkinter.tix" [tool.pylint.'EXCEPTIONS'] overgeneral-exceptions= [ - "BaseException", - "Exception" + "builtins.BaseException", + "builtins.Exception" ] diff --git a/src/sharkey_crawler/__init__.py b/src/sharkey_crawler/__init__.py index ef43cf9..ebd2ae0 100644 --- a/src/sharkey_crawler/__init__.py +++ b/src/sharkey_crawler/__init__.py @@ -2,10 +2,12 @@ # Copyright (c) Hexafuchs. All rights reserved. # Licensed under the MIT License. See LICENSE in project root for information. # ------------------------------------------------------------- +# flake8: noqa: F403,F401 """Python library to crawl user data from sharkey instances""" + from __future__ import annotations -__version__ = "0.0.1" +__version__ = "0.1.0" from .main import * from .types import * diff --git a/src/sharkey_crawler/convert.py b/src/sharkey_crawler/convert.py index 67bd2d7..c657b6f 100644 --- a/src/sharkey_crawler/convert.py +++ b/src/sharkey_crawler/convert.py @@ -12,12 +12,35 @@ def to_snake_case(camel_case_str: str) -> str: + """ + Converts a camelCase string to a snake_case string. + + :param camel_case_str: string in camel case notation + :return: converted string in snake case notation + """ return "".join(map(lambda e: "_" + e.lower() if e.isupper() else e, list(camel_case_str))) def dict_keys_to_snake_case(data: T) -> T: - if type(data) is list: - return [dict_keys_to_snake_case(e) for e in data] - if type(data) is not dict: + """ + Converts all keys in a dictionary from camelCase into snake_case. + + It recursively handles dictionaries and lists. 
+ + :param data: dictionary to convert, or list with dictionaries + :return: all keys turned into snake case + """ + if isinstance(data, list): + # yes, we could do this in one line, but pyright does not like this + for i, element in enumerate(data): + data[i] = dict_keys_to_snake_case(element) return data - return {to_snake_case(key): dict_keys_to_snake_case(value) for key, value in data.items()} + if not isinstance(data, dict): + return data + + # yes, we could do this in one line as well, but pyright does not like this either + for key in list(data.keys()): + cache = data[key] + del data[key] + data[to_snake_case(key)] = dict_keys_to_snake_case(cache) + return data diff --git a/src/sharkey_crawler/main.py b/src/sharkey_crawler/main.py index 8b9c665..14ad445 100644 --- a/src/sharkey_crawler/main.py +++ b/src/sharkey_crawler/main.py @@ -2,28 +2,39 @@ # Copyright (c) Hexafuchs. All rights reserved. # Licensed under the MIT License. See LICENSE in project root for information. # --------------------------------------------------------------------------------- -"""This provides a sharkey accessor instance for the /users/notes endpoint.""" +"""This provides a sharkey accessor instance.""" from __future__ import annotations -import requests +from typing import Annotated -from pydantic import conint +import requests +from annotated_types import Interval -from .types import Post +from .types import Post, SharkeyId from .convert import dict_keys_to_snake_case __all__ = ["SharkeyServer"] -SharkeyId = str - class SharkeyServer: + """
    Local representation of a sharkey server, exposes server api endpoints and parses data.
+
+    If you require more endpoints, feel free to open a pull request or discussion.
+    """ + def __init__(self, base_url: str): + """ + :param base_url: base url of the sharkey server. 
if no scheme is passed, https is assumed + :returns: new sharkey proxy instance + """ + self.base_url = base_url.rstrip("/") if not self.base_url.startswith("http://") and not self.base_url.startswith("https://"): self.base_url = f"https://{self.base_url}" + # noinspection PyTypeHints def user_notes( self, user_id: SharkeyId, @@ -31,37 +42,39 @@ def user_notes( with_renotes: bool = True, with_files: bool = False, with_replies: bool = False, - limit: conint(ge=0, le=100) = 10, + limit: Annotated[int, Interval(ge=0, le=100)] = 10, allow_partial: bool = False, since_date: int | None = None, until_date: int | None = None, since_id: SharkeyId | None = None, until_id: SharkeyId | None = None, + timeout: int | float | None = 300, ) -> list[Post]: """ This function returns the latest posts about a user. - WARNING: Because the functionality is not documented, I will take an educated guess about the meaning of the - arguments. I can only spend looking into other peoples codes for so much time. Please open an issue if I - got something wrong. 
If you want to contribute, have a look at the code yourself at - https://activitypub.software/TransFem-org/Sharkey - - @param user_id: user id you want to crawl - @param with_channel_notes: - @param with_renotes: include boosts (boosts that quote something are always included) - @param with_files: include posts with files - @param with_replies: include replies to other users - @param limit: maximum number of posts, between 1 and 100 - @param allow_partial: read only from redis, do not resort to the database to fill the limit - @param since_date: get posts after or from this date, expressed as milliseconds since epoch, - do not use with other 'since_' or 'until_' argument - @param until_date: get posts before or from this date, expressed as milliseconds since epoch, - do not use with other 'since_' or 'until_' argument - @param since_id: get posts after this id (and this id), expressed as milliseconds since epoch, - do not use with other 'since_' or 'until_' argument - @param until_id: get posts before this id (and this id), expressed as milliseconds since epoch, - do not use with other 'since_' or 'until_' argument - @return: + **WARNING: Because the functionality is not documented, I will take an educated guess about the meaning of the + arguments. I can only spend looking into other peoples codes for so much time. Please open an issue if I + got something wrong. 
If you want to contribute, have a look at the code yourself at + https://activitypub.software/TransFem-org/Sharkey** + + :param user_id: user id you want to crawl + :param with_channel_notes: + :param with_renotes: include boosts (boosts that quote something are always included) + :param with_files: include posts with files + :param with_replies: include replies to other users + :param limit: maximum number of posts, between 1 and 100 + :param allow_partial: read only from redis, do not resort to the database to fill the limit + :param since_date: get posts after or from this date, expressed as milliseconds since epoch, + do not use with other `since_` or `until_` argument + :param until_date: get posts before or from this date, expressed as milliseconds since epoch, + do not use with other `since_` or `until_` argument + :param since_id: get posts after this id (and this id), expressed as milliseconds since epoch, + do not use with other `since_` or `until_` argument + :param until_id: get posts before this id (and this id), expressed as milliseconds since epoch, + do not use with other `since_` or `until_` argument + :param timeout: timeout of the request + :returns: list of posts """ payload = { "userId": user_id, @@ -81,15 +94,12 @@ def user_notes( if until_id: payload["untilId"] = until_id - response = requests.post(self.base_url + "/api/users/notes", json=payload) + response = requests.post(self.base_url + "/api/users/notes", json=payload, timeout=timeout) data = response.json() posts = [] for post in data: - from pprint import pprint - - pprint(dict_keys_to_snake_case(post)) posts.append(Post.model_validate(dict_keys_to_snake_case(post))) return posts diff --git a/src/sharkey_crawler/types/__init__.py b/src/sharkey_crawler/types/__init__.py index 913ba8f..28b14d1 100644 --- a/src/sharkey_crawler/types/__init__.py +++ b/src/sharkey_crawler/types/__init__.py @@ -1,2 +1,10 @@ +# ------------------------------------------------------------- +# Copyright (c) 
Hexafuchs. All rights reserved. +# Licensed under the MIT License. See LICENSE in project root for information. +# ------------------------------------------------------------- +# flake8: noqa: F403,F401 +"""This provides access to all types.""" + +from .id import * from .post import * from .user import * diff --git a/src/sharkey_crawler/types/id.py b/src/sharkey_crawler/types/id.py new file mode 100644 index 0000000..7f41bac --- /dev/null +++ b/src/sharkey_crawler/types/id.py @@ -0,0 +1,13 @@ +# ------------------------------------------------------------- +# Copyright (c) Hexafuchs. All rights reserved. +# Licensed under the MIT License. See LICENSE in project root for information. +# ------------------------------------------------------------- +"""This defines types related to ids.""" +from __future__ import annotations + +from typing import TypeAlias + +__all__ = ["SharkeyId"] + +"""The current representation of an ID in Sharkey.""" +SharkeyId: TypeAlias = str diff --git a/src/sharkey_crawler/types/post.py b/src/sharkey_crawler/types/post.py index 6b8e78c..5bfe199 100644 --- a/src/sharkey_crawler/types/post.py +++ b/src/sharkey_crawler/types/post.py @@ -3,6 +3,7 @@ # Licensed under the MIT License. See LICENSE in project root for information. 
# --------------------------------------------------------------------------------- """This defines types related to posts.""" +# pylint: disable=missing-class-docstring,invalid-name from __future__ import annotations @@ -10,20 +11,17 @@ from datetime import datetime -from pydantic import BaseModel, ConfigDict +from pydantic import BaseModel from .user import UserLite +from .id import SharkeyId __all__ = ["Post", "Visibility", "DriveFile", "DriveFolder", "DriveFileProperties", "Channel", "Poll", "PollChoice"] -SharkeyId = str - class Post(BaseModel): """Represents a post as returned by Sharkey.""" - model_config = ConfigDict(extra="forbid") - id: SharkeyId created_at: datetime updated_at: datetime | None = None @@ -101,7 +99,7 @@ class DriveFileProperties(BaseModel): width: int | float height: int | float orientation: int | float | None = None - avg_color: str = None + avg_color: str | None = None class Channel(BaseModel): diff --git a/src/sharkey_crawler/types/user.py b/src/sharkey_crawler/types/user.py index a7dfb2d..1c626e5 100644 --- a/src/sharkey_crawler/types/user.py +++ b/src/sharkey_crawler/types/user.py @@ -3,6 +3,7 @@ # Licensed under the MIT License. See LICENSE in project root for information. 
# --------------------------------------------------------------------------------- """This defines types related to users.""" +# pylint: disable=missing-class-docstring,invalid-name from __future__ import annotations @@ -11,10 +12,9 @@ from pydantic import BaseModel -__all__ = ["UserLite", "OnlineStatus", "AvatarDecoration", "Instance", "BadgeRole"] - +from .id import SharkeyId -SharkeyId = str +__all__ = ["UserLite", "OnlineStatus", "AvatarDecoration", "Instance", "BadgeRole"] class UserLite(BaseModel): diff --git a/tests/conftest.py b/tests/conftest.py index a81a954..b305388 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,8 +4,6 @@ # --------------------------------------------------------------------------------- """ This is a configuration file for pytest containing customizations and fixtures. - -In VSCode, Code Coverage is recorded in config.xml. Delete this file to reset reporting. """ from __future__ import annotations @@ -16,8 +14,6 @@ def pytest_collection_modifyitems(items: list[Item]): for item in items: - if "spark" in item.nodeid: - item.add_marker(pytest.mark.spark) if "_int_" in item.nodeid: item.add_marker(pytest.mark.integration) else: