
Implement 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 support; black; GitHub Actions; upgrade deps #81

Open · wants to merge 16 commits into base: dev

Changes from 1 commit (the full list of 16 commits follows)

Commits (16):
41c7917
* Bring in changes from master branch, i.e., blacken, remove unused i…
SamuelMarks Oct 25, 2020
e527fc0
[.github/workflows/main.yml] Install test deps; [enforce/parsers.py] …
SamuelMarks Oct 25, 2020
b80c342
[.editorconfig] Init; [requirements_*.txt] Bump dependencies; [enforc…
SamuelMarks Oct 26, 2020
a91d029
[tests/protocol_test.py] Decouple test to avoid SyntaxError; [enforce…
SamuelMarks Oct 26, 2020
349e6f8
[enforce/nodes.py] Add support for `Literal` type
SamuelMarks Oct 27, 2020
748f7b0
[*.py] use `hasattr` to throw less exceptions
SamuelMarks Oct 28, 2020
312a03d
[{enforcers,decorators}.py,tests/test_enforcers.py] Enforcers are no …
SamuelMarks Oct 29, 2020
451ef62
[test_*.py] Replace pytest with unittest; [test_validators.py] Implem…
SamuelMarks Nov 2, 2020
7a731f6
[tests/test_protocol.py] Fixed TODO; [tests/test_types.py] Add missin…
SamuelMarks Nov 3, 2020
0a75368
[tests/test_{decorators,protocol}.py] Get more tests passing on 3.9; …
SamuelMarks Nov 3, 2020
3a5c680
[{nodes,tests/test_exception_messages}.py] Improve string concatenation
SamuelMarks Nov 4, 2020
32c75d8
[*.py] Improve string concatenation
SamuelMarks Nov 4, 2020
c86383a
[test/{test_exceptions,test_exception_messages}.py] Improve string co…
SamuelMarks Nov 4, 2020
1a7a3fc
[*.py] Blacken with new version
SamuelMarks Dec 20, 2021
53a4193
[.github/workflows/main.yml] Python 3.10 support; [README.md] 3.10; […
SamuelMarks Dec 20, 2021
8ca1f93
[parsers.py] `python_version_tuple() < ("3", "6")` => `sys.version_in…
SamuelMarks Dec 31, 2021
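On the last commit above (8ca1f93): platform.python_version_tuple() returns a tuple of strings, so comparing it against ("3", "6") is lexicographic and misorders 3.10. A minimal sketch, not from the diff, of the difference:

    import platform
    import sys

    # On CPython 3.10.x, python_version_tuple() is something like ('3', '10', '4').
    print(platform.python_version_tuple())

    # String comparison: "10" < "6" lexicographically, so 3.10 is wrongly "older" than 3.6.
    print(("3", "10", "4") < ("3", "6"))   # True

    # sys.version_info compares integers, so the ordering is correct.
    print(sys.version_info < (3, 6))       # False on Python 3.10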
[test_*.py] Replace pytest with unittest; [test_validators.py] Implement for `literal`; [*.py] black; use `object` base class
SamuelMarks committed Nov 2, 2020
commit 451ef6289a5fa522010afc429956a7e5d53bbd64
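The commit message above mentions replacing pytest with unittest. A minimal sketch, not taken from this diff, of what a converted test looks like, assuming enforce's documented runtime_validation decorator and RuntimeTypeError exception:

    import unittest

    from enforce import runtime_validation
    from enforce.exceptions import RuntimeTypeError


    class RuntimeValidationTestCase(unittest.TestCase):
        def test_rejects_mismatched_argument(self):
            @runtime_validation
            def add(a: int, b: int) -> int:
                return a + b

            self.assertEqual(add(1, 2), 3)
            # A str where an int is annotated should fail runtime validation.
            with self.assertRaises(RuntimeTypeError):
                add(1, "2")


    if __name__ == "__main__":
        unittest.main()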
10 changes: 5 additions & 5 deletions .github/workflows/main.yml
@@ -1,18 +1,18 @@
name: Test

on:
push:
branches:
- 'master'
- 'dev'
push:
branches:
- 'master'
- 'dev'

jobs:
test:
name: ${{ matrix.os.name }} ${{ matrix.python-version }}
runs-on: ${{ matrix.os.runs-on }}
strategy:
matrix:
python-version: [3.5, 3.6, 3.7, 3.8, 3.9]
python-version: [ 3.5, 3.6, 3.7, 3.8, 3.9 ]
os:
- name: Linux
runs-on: ubuntu-latest
2 changes: 1 addition & 1 deletion enforce/__init__.py
@@ -1,3 +1,3 @@
from .decorators import runtime_validation
from .settings import config
from .protocol import P
from .settings import config
8 changes: 4 additions & 4 deletions enforce/enforcers.py
@@ -18,7 +18,7 @@
Parameters = namedtuple("Parameters", ["args", "kwargs", "skip"])


class Enforcer:
class Enforcer(object):
"""
A container for storing type checking logic of functions
"""
@@ -108,7 +108,6 @@ def validate_inputs(self, input_data: Parameters) -> Parameters:
bind_arguments.args, bind_arguments.kwargs, skip
)
return validated_data

process_errors(self.settings, self.validator.errors, self.hints)

def validate_outputs(self, output_data: T) -> T:
@@ -172,7 +171,9 @@ def __init__(self, wrapped, settings=None):
)
elif is_type_of_type(wrapped_type, typing.GenericMeta):
super().__init__(wrapped)
self.__enforcer__ = get_enforcer(self, generic=True, settings=self._self_settings)
self.__enforcer__ = get_enforcer(
self, generic=True, settings=self._self_settings
)
else:
raise TypeError("Only generics can be wrapped in GenericProxy")

@@ -323,7 +324,6 @@ def generate_new_enforcer(func, generic, parent_root, instance_of, settings):
bound = False
validator = init_validator(settings, hints, parent_root)


if hasattr(func, "__name__"):
name = func.__name__
elif hasattr(func, "__class__"):
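An earlier commit in this PR ("use `hasattr` to throw less exceptions") shows up in the generate_new_enforcer hunk above. A hypothetical standalone helper, not part of the diff, illustrating the same look-before-you-leap pattern:

    def callable_name(func) -> str:
        # Prefer the wrapped object's own __name__ (functions, lambdas);
        # fall back to its class name for arbitrary callables,
        # instead of catching AttributeError.
        if hasattr(func, "__name__"):
            return func.__name__
        if hasattr(func, "__class__"):
            return func.__class__.__name__
        return repr(func)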
101 changes: 62 additions & 39 deletions enforce/nodes.py
@@ -17,10 +17,10 @@
}


class BaseNode:
class BaseNode(object):
def __init__(
self,
expected_data_type: type,
expected_data_type: typing.Optional[type],
is_sequence: bool,
is_container: bool = False,
is_forward_ref: bool = None,
@@ -51,7 +51,7 @@ def __init__(
self.children = []

def validate_children(
self, validator: typing.Literal["Validator"], propagated_data: typing.Any
self, validator: "Validator", propagated_data: typing.Any
) -> typing.List[dt.ValidationResult]:
"""
Performs the validation of child nodes and collects their results
@@ -111,42 +111,48 @@ def get_actual_data_type(

return actual_type

def set_out_data(self, validator: typing.Literal["Validator"], in_data, out_data):
def set_out_data(self, validator: "Validator", in_data, out_data):
"""
Sets the output data for the node to the combined data of its children
Also sets the type of a last processed node
"""
self.in_type = type(in_data)
self.data_out = out_data

def preprocess_data(self, validator: typing.Literal["Validator"], data):
def preprocess_data(self, validator: "Validator", data):
"""
Prepares data for the other stages if needed
"""
return data

def postprocess_data(self, validator: typing.Literal["Validator"], data):
@staticmethod
def postprocess_data(validator: "Validator", data):
"""
Clears or updates data if needed after it was processed by all other stages
"""
return data

def validate_data(self, validator: typing.Literal["Validator"], data, sticky=False) -> bool:
def validate_data(
self, validator: "Validator", data, sticky=False
) -> dt.ValidationResult:
"""
Responsible for determining if node is of specific type
"""
return dt.ValidationResult(
valid=False, data=data, type_name=extract_type_name(data)
)

def map_data(self, validator: typing.Literal["Validator"], self_validation_result):
def map_data(self, validator: "Validator", self_validation_result):
"""
Maps the input data to the nested type nodes
"""
return []

def reduce_data(
self, validator: typing.Literal["Validator"], self_validation_result, child_validation_results
self,
validator: "Validator",
self_validation_result,
child_validation_results,
):
"""
Combines the data from the nested type nodes into a current node expected data type
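The hunk above repeatedly replaces typing.Literal["Validator"] annotations with the plain string "Validator". A small sketch, with hypothetical stand-in classes, of why the string form is the correct spelling: Literal["Validator"] means "the literal string 'Validator'", not a reference to the Validator class, and typing.Literal does not exist before Python 3.8, whereas a string forward reference works on every version this PR targets.

    import typing


    class Validator:          # stand-in for enforce's Validator
        settings = None


    class Node:
        def validate_children(
            self, validator: "Validator", propagated_data: typing.Any
        ) -> bool:
            # The string annotation is only resolved lazily (if at all),
            # so Validator need not be defined or imported at definition time.
            return True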
@@ -182,7 +188,7 @@ class SimpleNode(BaseNode):
def __init__(self, expected_data_type, **kwargs):
super().__init__(expected_data_type, is_sequence=True, type_var=False, **kwargs)

def validate_data(self, validator: typing.Literal["Validator"], data, sticky=False):
def validate_data(self, validator: "Validator", data, sticky=False):
if self.bound:
expected_data_type = self.in_type
else:
@@ -208,12 +214,14 @@ def validate_data(self, validator: typing.Literal["Validator"], data, sticky=Fal

type_name = TYPE_NAME_ALIASES.get(type_name, type_name)

if getattr(expected_data_type, '__origin__', None) == typing.Literal and data in frozenset(expected_data_type.__args__):
if getattr(
expected_data_type, "__origin__", None
) == typing.Literal and data in frozenset(expected_data_type.__args__):
result, type_name = True, expected_data_type

return dt.ValidationResult(valid=result, data=data, type_name=type_name)

def map_data(self, validator: typing.Literal["Validator"], self_validation_result):
def map_data(self, validator: "Validator", self_validation_result):
data = self_validation_result.data
propagated_data = []
if isinstance(data, list):
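The hunk above reformats the typing.Literal check added earlier in this PR (commit 349e6f8): a value passes when the annotation's __origin__ is typing.Literal and the value is one of its __args__. A hypothetical usage sketch of what that branch is intended to accept and reject; the names are illustrative, not from the test suite:

    import typing                      # requires Python 3.8+ for typing.Literal

    from enforce import runtime_validation
    from enforce.exceptions import RuntimeTypeError


    @runtime_validation
    def open_mode(mode: typing.Literal["r", "w"]) -> str:
        return mode


    open_mode("r")                     # "r" is in ("r", "w") -> accepted
    try:
        open_mode("x")                 # not a listed literal -> rejected
    except RuntimeTypeError:
        pass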
@@ -237,16 +245,19 @@ class UnionNode(BaseNode):
def __init__(self, **kwargs):
super().__init__(typing.Any, is_sequence=False, is_container=True, **kwargs)

def validate_data(self, validator: typing.Literal["Validator"], data, sticky=False):
def validate_data(self, validator: "Validator", data, sticky=False):
return dt.ValidationResult(
valid=True, data=data, type_name=extract_type_name(data)
)

def map_data(self, validator: typing.Literal["Validator"], self_validation_result):
def map_data(self, validator: "Validator", self_validation_result):
return [self_validation_result.data for _ in self.children]

def reduce_data(
self, validator: typing.Literal["Validator"], self_validation_result, child_validation_result
self,
validator: "Validator",
self_validation_result,
child_validation_result,
):
return next(
(
@@ -277,14 +288,17 @@ def __init__(self, **kwargs):
expected_data_type=None, is_sequence=True, type_var=True, **kwargs
)

def validate_data(self, validator: typing.Literal["Validator"], data, sticky=False):
def validate_data(self, validator: "Validator", data, sticky=False):
return dt.ValidationResult(valid=True, data=data, type_name="typing.TypeVar")

def map_data(self, validator: typing.Literal["Validator"], self_validation_result):
def map_data(self, validator: "Validator", self_validation_result):
return [self_validation_result.data for _ in self.children]

def reduce_data(
self, validator: typing.Literal["Validator"], self_validation_result, child_validation_results
self,
validator: "Validator",
self_validation_result,
child_validation_results,
):
# Returns first non-None element, or None if every element is None
return next(
@@ -296,7 +310,7 @@ def reduce_data(
None,
)

def validate_children(self, validator: typing.Literal["Validator"], propagated_data):
def validate_children(self, validator: "Validator", propagated_data):
children_validation_results = []

for i, child in enumerate(self.children):
@@ -329,7 +343,7 @@ def __init__(self, variable_length=False, **kwargs):
self.variable_length = variable_length
super().__init__(typing.Tuple, is_sequence=True, is_container=True, **kwargs)

def validate_data(self, validator: typing.Literal["Validator"], data, sticky=False):
def validate_data(self, validator: "Validator", data, sticky=False):
# fix for https://github.com/RussBaz/enforce/issues/62 (1/4)
if data is None:
# unfortunately this is not enough to stop propagating to children...
@@ -364,7 +378,7 @@ def validate_data(self, validator: typing.Literal["Validator"], data, sticky=Fal
valid=False, data=data, type_name=extract_type_name(input_type)
)

def validate_children(self, validator: typing.Literal["Validator"], propagated_data):
def validate_children(self, validator: "Validator", propagated_data):
# fix for https://github.com/RussBaz/enforce/issues/62 (3/4)
if propagated_data is None:
# yield a sequence of one element: a single failure
@@ -389,7 +403,7 @@ def validate_children(self, validator: typing.Literal["Validator"], propagated_d
else:
yield super().validate_children(validator, propagated_data)

def map_data(self, validator: typing.Literal["Validator"], self_validation_result):
def map_data(self, validator: "Validator", self_validation_result):
data = self_validation_result.data
# fix for https://github.com/RussBaz/enforce/issues/62 (2/4)
if data is not None:
@@ -401,7 +415,10 @@ def map_data(self, validator: typing.Literal["Validator"], self_validation_resul
return None

def reduce_data(
self, validator: typing.Literal["Validator"], self_validation_result, child_validation_results
self,
validator: "Validator",
self_validation_result,
child_validation_results,
):
return tuple(result.data for result in child_validation_results)

@@ -442,7 +459,7 @@ def __init__(self, data_type, exception_type, **kwargs):
self.data_type_name = None
self.exception_type = exception_type

def preprocess_data(self, validator: typing.Literal["Validator"], data):
def preprocess_data(self, validator: "Validator", data):
data_type = type(data)

self.data_type_name = data_type.__name__
@@ -466,17 +483,17 @@ def preprocess_data(self, validator: typing.Literal["Validator"], data):
str(type(data))
+ " with incorrect arguments: "
+ ", ".join(
field + " -> " + str(type(getattr(data, field)))
for field in data._fields
)
field + " -> " + str(type(getattr(data, field)))
for field in data._fields
)
)
return None
except AttributeError:
return None
except TypeError:
return None

def validate_data(self, validator: typing.Literal["Validator"], data, sticky=False):
def validate_data(self, validator: "Validator", data, sticky=False):
if data is None:
data_type_name = self.data_type_name
else:
@@ -508,7 +525,7 @@ def __init__(self, data_type, **kwargs):
data_type, is_sequence=True, is_container=True, type_var=False, **kwargs
)

def preprocess_data(self, validator: typing.Literal["Validator"], data):
def preprocess_data(self, validator: "Validator", data):
from .enforcers import Enforcer, get_enforcer

covariant = self.covariant or validator.settings.covariant
@@ -541,7 +558,7 @@ def preprocess_data(self, validator: typing.Literal["Validator"], data):

return data

def validate_data(self, validator: typing.Literal["Validator"], data, sticky=False):
def validate_data(self, validator: "Validator", data, sticky=False):
try:
input_type = type(data)

@@ -619,7 +636,7 @@ def __init__(self, data_type, **kwargs):
enforcer, is_sequence=True, is_container=True, type_var=False, **kwargs
)

def preprocess_data(self, validator: typing.Literal["Validator"], data):
def preprocess_data(self, validator: "Validator", data):
from .enforcers import Enforcer, GenericProxy

try:
@@ -640,7 +657,7 @@ def preprocess_data(self, validator: typing.Literal["Validator"], data):
else:
return GenericProxy(data)

def validate_data(self, validator: typing.Literal["Validator"], data, sticky=False):
def validate_data(self, validator: "Validator", data, sticky=False):
enforcer = data.__enforcer__
input_type = enforcer.signature

@@ -684,7 +701,7 @@ class MappingNode(BaseNode):
def __init__(self, data_type, **kwargs):
super().__init__(data_type, is_sequence=True, is_container=True, **kwargs)

def validate_data(self, validator: typing.Literal["Validator"], data, sticky=False):
def validate_data(self, validator: "Validator", data, sticky=False):
if not isinstance(data, type):
input_type = type(data)
else:
@@ -703,7 +720,7 @@ def validate_data(self, validator: typing.Literal["Validator"], data, sticky=Fal
type_name = input_type.__name__
return dt.ValidationResult(valid=result, data=data, type_name=type_name)

def validate_children(self, validator: typing.Literal["Validator"], propagated_data):
def validate_children(self, validator: "Validator", propagated_data):
key_validator = self.children[0]
value_validator = self.children[1]

@@ -734,7 +751,7 @@ def validate_children(self, validator: typing.Literal["Validator"], propagated_d

yield children_validation_results

def map_data(self, validator: typing.Literal["Validator"], self_validation_result):
def map_data(self, validator: "Validator", self_validation_result):
data = self_validation_result.data
output = []
if self_validation_result.valid:
@@ -744,7 +761,10 @@ def map_data(self, validator: typing.Literal["Validator"], self_validation_resul
return output

def reduce_data(
self, validator: typing.Literal["Validator"], self_validation_result, child_validation_results
self,
validator: "Validator",
self_validation_result,
child_validation_results,
):
return {result.data[0]: result.data[1] for result in child_validation_results}

@@ -804,16 +824,19 @@ def __init__(self, forward_ref, **kwargs):
)
self.forward_ref = forward_ref

def validate_data(self, validator: typing.Literal["Validator"], data, sticky=False):
def validate_data(self, validator: "Validator", data, sticky=False):
return dt.ValidationResult(
valid=True, data=data, type_name=extract_type_name(data)
)

def map_data(self, validator: typing.Literal["Validator"], self_validation_result):
def map_data(self, validator: "Validator", self_validation_result):
return [self_validation_result.data]

def reduce_data(
self, validator: typing.Literal["Validator"], self_validation_result, child_validation_result
self,
validator: "Validator",
self_validation_result,
child_validation_result,
):
return child_validation_result[0].data
