Commit b3a4947

Adding include_paths to the docs

1 parent ca8e58e

13 files changed: +298 / -60 lines

conftest.py

Lines changed: 22 additions & 0 deletions
@@ -46,6 +46,28 @@ def nested_a_result():
         return json.load(the_file)
 
 
+@pytest.fixture(scope='class')
+def nested_a_affected_paths():
+    return {
+        'root[0][0][2][0][1]', 'root[0][1][1][1][5]', 'root[0][2][1]',
+        'root[1][1][2][0][1]', 'root[1][2][0]', 'root[1][2][0][1][5]',
+        'root[1][0][2][2][3]', 'root[0][0][1][0][0]', 'root[0][1][0][2][3]',
+        'root[0][3][0][2][3]', 'root[0][3][1][0][2]', 'root[1][1][1][0][0]',
+        'root[1][0][1][2][1]', 'root[1][0][2][1][2]', 'root[1][3][0][2][3]',
+        'root[1][3][1][0][2]', 'root[1][2][0][2]', 'root[1][0][2][0][1]',
+        'root[0][3][2][0][1]', 'root[0][3][2][1][0]', 'root[1][3][1][1]',
+        'root[1][2][1][1][0]', 'root[1][2][1][0]', 'root[1][0][0][0][2]',
+        'root[1][3][2][1][0]', 'root[1][0][0][1][1]', 'root[0][1][2][0]',
+        'root[0][1][2][1][0]', 'root[0][2][0][1][2]', 'root[1][3][0][1]',
+        'root[0][3][1][1]', 'root[1][2][0][0][2]', 'root[1][3][2][0][1]',
+        'root[1][0][1][0]', 'root[1][2][0][0][0]', 'root[1][0][0][0][1]',
+        'root[1][3][2][2][2]', 'root[0][1][1][2][1]', 'root[0][1][1][2][2]',
+        'root[0][2][0][0][2]', 'root[0][2][0][0][3]', 'root[0][3][1][2][1]',
+        'root[0][3][1][2][2]', 'root[1][2][1][2][3]', 'root[1][0][0][1][2]',
+        'root[1][0][0][2][1]', 'root[1][3][1][2][1]', 'root[1][3][1][2][2]'
+    }
+
+
 @pytest.fixture(scope='class')
 def nested_b_t1():
     with open(os.path.join(FIXTURES_DIR, 'nested_b_t1.json')) as the_file:
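The set above is the expected value of the new DeepDiff.affected_paths property for the nested_a fixtures. A minimal sketch of a test that could consume it follows; the nested_a_t1/nested_a_t2 fixture names and the ignore_order flag are assumptions for illustration, since only nested_a_affected_paths appears in this hunk.

    # Hypothetical test; nested_a_t1 / nested_a_t2 are assumed to load the
    # corresponding JSON fixtures from FIXTURES_DIR.
    class TestNestedAAffectedPaths:

        def test_affected_paths(self, nested_a_t1, nested_a_t2, nested_a_affected_paths):
            from deepdiff import DeepDiff
            diff = DeepDiff(nested_a_t1, nested_a_t2, ignore_order=True)
            # affected_paths returns an OrderedSet; compare it as a plain set.
            assert set(diff.affected_paths) == nested_a_affected_paths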

deepdiff/deephash.py

Lines changed: 7 additions & 3 deletions
@@ -9,7 +9,7 @@
     convert_item_or_items_into_compiled_regexes_else_none,
     get_id, type_is_subclass_of_type_group, type_in_type_group,
     number_to_string, datetime_normalize, KEY_TO_VAL_STR, short_repr,
-    get_truncate_datetime, dict_)
+    get_truncate_datetime, dict_, add_root_to_paths)
 from deepdiff.base import Base
 logger = logging.getLogger(__name__)
 
@@ -123,6 +123,7 @@ def __init__(self,
                  hashes=None,
                  exclude_types=None,
                  exclude_paths=None,
+                 include_paths=None,
                  exclude_regex_paths=None,
                  hasher=None,
                  ignore_repetition=True,
@@ -146,7 +147,7 @@ def __init__(self,
             raise ValueError(
                 ("The following parameter(s) are not valid: %s\n"
                  "The valid parameters are obj, hashes, exclude_types, significant_digits, truncate_datetime,"
-                 "exclude_paths, exclude_regex_paths, hasher, ignore_repetition, "
+                 "exclude_paths, include_paths, exclude_regex_paths, hasher, ignore_repetition, "
                  "number_format_notation, apply_hash, ignore_type_in_groups, ignore_string_type_changes, "
                  "ignore_numeric_type_changes, ignore_type_subclasses, ignore_string_case "
                  "number_to_string_func, ignore_private_variables, parent "
@@ -160,7 +161,8 @@ def __init__(self,
         exclude_types = set() if exclude_types is None else set(exclude_types)
         self.exclude_types_tuple = tuple(exclude_types)  # we need tuple for checking isinstance
         self.ignore_repetition = ignore_repetition
-        self.exclude_paths = convert_item_or_items_into_set_else_none(exclude_paths)
+        self.exclude_paths = add_root_to_paths(convert_item_or_items_into_set_else_none(exclude_paths))
+        self.include_paths = add_root_to_paths(convert_item_or_items_into_set_else_none(include_paths))
         self.exclude_regex_paths = convert_item_or_items_into_compiled_regexes_else_none(exclude_regex_paths)
         self.hasher = default_hasher if hasher is None else hasher
         self.hashes[UNPROCESSED_KEY] = []
@@ -327,6 +329,8 @@ def _skip_this(self, obj, parent):
         skip = False
         if self.exclude_paths and parent in self.exclude_paths:
             skip = True
+        if self.include_paths and parent not in self.include_paths:
+            skip = True
         elif self.exclude_regex_paths and any(
                 [exclude_regex_path.search(parent) for exclude_regex_path in self.exclude_regex_paths]):
             skip = True
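A minimal sketch of the new DeepHash parameter; the object and path below are made up and no particular hash output is claimed. Like exclude_paths, include_paths is normalized by add_root_to_paths, so a bare key and its root[...] spellings are treated the same, and _skip_this now skips anything whose path is not in the include set.

    from deepdiff import DeepHash

    obj = {'id': 1, 'payload': {'a': 1, 'b': 2}}
    # 'payload' is expanded to root.payload / root[payload] / root['payload'];
    # paths outside include_paths are skipped during hashing.
    hashes = DeepHash(obj, include_paths='payload')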

deepdiff/delta.py

Lines changed: 5 additions & 4 deletions
@@ -7,7 +7,8 @@
 from deepdiff.helper import (
     strings, short_repr, numbers,
     np_ndarray, np_array_factory, numpy_dtypes, get_doc,
-    not_found, numpy_dtype_string_to_type, dict_)
+    not_found, numpy_dtype_string_to_type, dict_,
+)
 from deepdiff.path import _path_to_elements, _get_nested_obj, GET, GETATTR
 from deepdiff.anyset import AnySet
 
@@ -70,11 +71,11 @@ def __init__(
         serializer=pickle_dump,
         verify_symmetry=False,
     ):
-        if 'safe_to_import' not in set(deserializer.__code__.co_varnames):
+        if hasattr(deserializer, '__code__') and 'safe_to_import' in set(deserializer.__code__.co_varnames):
+            _deserializer = deserializer
+        else:
             def _deserializer(obj, safe_to_import=None):
                 return deserializer(obj)
-        else:
-            _deserializer = deserializer
 
         if diff is not None:
             if isinstance(diff, DeepDiff):
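The reordered check above stops Delta from assuming the deserializer is a plain Python function. A minimal sketch of the case it unblocks; functools.partial is just one arbitrary example of a callable without a __code__ attribute.

    import functools
    import json

    from deepdiff import DeepDiff, Delta

    t1 = {'a': 1}
    t2 = {'a': 2}

    # A partial (like a C builtin) has no __code__, so the old
    # deserializer.__code__.co_varnames lookup raised AttributeError before
    # the deserializer was ever called; it is now wrapped instead.
    loads = functools.partial(json.loads)
    delta = Delta(DeepDiff(t1, t2), deserializer=loads)
    assert t1 + delta == t2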

deepdiff/diff.py

Lines changed: 39 additions & 17 deletions
@@ -21,15 +21,15 @@
     type_is_subclass_of_type_group, type_in_type_group, get_doc,
     number_to_string, datetime_normalize, KEY_TO_VAL_STR, booleans,
     np_ndarray, get_numpy_ndarray_rows, OrderedSetPlus, RepeatedTimer,
-    TEXT_VIEW, TREE_VIEW, DELTA_VIEW, detailed__dict__,
+    TEXT_VIEW, TREE_VIEW, DELTA_VIEW, detailed__dict__, add_root_to_paths,
     np, get_truncate_datetime, dict_, CannotCompare, ENUM_IGNORE_KEYS)
 from deepdiff.serialization import SerializationMixin
 from deepdiff.distance import DistanceMixin
 from deepdiff.model import (
     RemapDict, ResultDict, TextResult, TreeResult, DiffLevel,
-    DictRelationship, AttributeRelationship,
+    DictRelationship, AttributeRelationship, REPORT_KEYS,
     SubscriptableIterableRelationship, NonSubscriptableIterableRelationship,
-    SetRelationship, NumpyArrayRelationship, CUSTOM_FIELD)
+    SetRelationship, NumpyArrayRelationship, CUSTOM_FIELD, PrettyOrderedSet, )
 from deepdiff.deephash import DeepHash, combine_hashes_lists
 from deepdiff.base import Base
 from deepdiff.lfucache import LFUCache, DummyLFU
@@ -85,6 +85,7 @@ def _report_progress(_stats, progress_logger, duration):
 DEEPHASH_PARAM_KEYS = (
     'exclude_types',
     'exclude_paths',
+    'include_paths',
     'exclude_regex_paths',
     'hasher',
     'significant_digits',
@@ -119,6 +120,7 @@ def __init__(self,
                  exclude_obj_callback=None,
                  exclude_obj_callback_strict=None,
                  exclude_paths=None,
+                 include_paths=None,
                  exclude_regex_paths=None,
                  exclude_types=None,
                  get_deep_distance=False,
@@ -157,7 +159,7 @@ def __init__(self,
             raise ValueError((
                 "The following parameter(s) are not valid: %s\n"
                 "The valid parameters are ignore_order, report_repetition, significant_digits, "
-                "number_format_notation, exclude_paths, exclude_types, exclude_regex_paths, ignore_type_in_groups, "
+                "number_format_notation, exclude_paths, include_paths, exclude_types, exclude_regex_paths, ignore_type_in_groups, "
                 "ignore_string_type_changes, ignore_numeric_type_changes, ignore_type_subclasses, truncate_datetime, "
                 "ignore_private_variables, ignore_nan_inequality, number_to_string_func, verbose_level, "
                 "view, hasher, hashes, max_passes, max_diffs, "
@@ -188,7 +190,8 @@ def __init__(self,
             ignore_numeric_type_changes=ignore_numeric_type_changes,
             ignore_type_subclasses=ignore_type_subclasses)
         self.report_repetition = report_repetition
-        self.exclude_paths = convert_item_or_items_into_set_else_none(exclude_paths)
+        self.exclude_paths = add_root_to_paths(convert_item_or_items_into_set_else_none(exclude_paths))
+        self.include_paths = add_root_to_paths(convert_item_or_items_into_set_else_none(include_paths))
         self.exclude_regex_paths = convert_item_or_items_into_compiled_regexes_else_none(exclude_regex_paths)
         self.exclude_types = set(exclude_types) if exclude_types else None
         self.exclude_types_tuple = tuple(exclude_types) if exclude_types else None  # we need tuple for checking isinstance
@@ -431,21 +434,24 @@ def _skip_this(self, level):
         Check whether this comparison should be skipped because one of the objects to compare meets exclusion criteria.
         :rtype: bool
         """
+        level_path = level.path()
         skip = False
-        if self.exclude_paths and level.path() in self.exclude_paths:
+        if self.exclude_paths and level_path in self.exclude_paths:
+            skip = True
+        if self.include_paths and level_path not in self.include_paths:
             skip = True
         elif self.exclude_regex_paths and any(
-                [exclude_regex_path.search(level.path()) for exclude_regex_path in self.exclude_regex_paths]):
+                [exclude_regex_path.search(level_path) for exclude_regex_path in self.exclude_regex_paths]):
             skip = True
         elif self.exclude_types_tuple and \
                 (isinstance(level.t1, self.exclude_types_tuple) or isinstance(level.t2, self.exclude_types_tuple)):
             skip = True
         elif self.exclude_obj_callback and \
-                (self.exclude_obj_callback(level.t1, level.path()) or self.exclude_obj_callback(level.t2, level.path())):
+                (self.exclude_obj_callback(level.t1, level_path) or self.exclude_obj_callback(level.t2, level_path)):
             skip = True
         elif self.exclude_obj_callback_strict and \
-                (self.exclude_obj_callback_strict(level.t1, level.path()) and
-                 self.exclude_obj_callback_strict(level.t2, level.path())):
+                (self.exclude_obj_callback_strict(level.t1, level_path) and
+                 self.exclude_obj_callback_strict(level.t2, level_path)):
             skip = True
 
         return skip
@@ -477,12 +483,12 @@ def _get_clean_to_keys_mapping(self, keys, level):
         return result
 
     def _diff_dict(self,
-                  level,
-                  parents_ids=frozenset([]),
-                  print_as_attribute=False,
-                  override=False,
-                  override_t1=None,
-                  override_t2=None):
+                   level,
+                   parents_ids=frozenset([]),
+                   print_as_attribute=False,
+                   override=False,
+                   override_t1=None,
+                   override_t2=None):
         """Difference of 2 dictionaries"""
         if override:
             # for special stuff like custom objects and named tuples we receive preprocessed t1 and t2
@@ -1097,7 +1103,7 @@ def get_other_pair(hash_value, in_t1=True):
                         old_indexes=t1_indexes,
                         new_indexes=t2_indexes)
                     self._report_result('repetition_change',
-                        repetition_change_level)
+                                        repetition_change_level)
@@ -1423,6 +1429,22 @@ def get_stats(self):
         """
         return self._stats
 
+    @property
+    def affected_paths(self):
+        """
+        Get the list of paths that were affected.
+        Whether a value was changed or they were added or removed.
+        """
+        result = OrderedSet()
+        for key in REPORT_KEYS:
+            value = self.get(key)
+            if value:
+                if isinstance(value, PrettyOrderedSet):
+                    result |= value
+                else:
+                    result |= OrderedSet(value.keys())
+        return result
+
 
 if __name__ == "__main__":  # pragma: no cover
     import doctest
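Two short sketches of what the diff.py changes expose; the sample objects are made up and the printed result is an approximation rather than verified output. include_paths mirrors exclude_paths and is meant to limit the report to the listed paths, while affected_paths unions the reported paths from every report key into one OrderedSet.

    from deepdiff import DeepDiff

    t1 = {'name': 'x', 'scores': [1, 2, 3]}
    t2 = {'name': 'y', 'scores': [1, 2, 3, 4]}

    # include_paths is accepted like exclude_paths (a path or a list of paths);
    # bare keys are expanded by add_root_to_paths.
    DeepDiff(t1, t2, include_paths="root['name']")

    # affected_paths collects every reported path, whether it came from
    # values_changed, iterable_item_added, or any other report key.
    diff = DeepDiff(t1, t2)
    print(diff.affected_paths)
    # roughly: OrderedSet(["root['name']", "root['scores'][3]"])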

deepdiff/helper.py

Lines changed: 21 additions & 23 deletions
@@ -8,7 +8,7 @@
 import time
 from ast import literal_eval
 from decimal import Decimal, localcontext
-from collections import namedtuple, OrderedDict
+from collections import namedtuple
 from itertools import repeat
 from ordered_set import OrderedSet
 from threading import Timer
@@ -220,28 +220,6 @@ class indexed_set(set):
     """
 
 
-JSON_CONVERTOR = {
-    Decimal: float,
-    OrderedSet: list,
-    type: lambda x: x.__name__,
-    bytes: lambda x: x.decode('utf-8')
-}
-
-
-def json_convertor_default(default_mapping=None):
-    _convertor_mapping = JSON_CONVERTOR.copy()
-    if default_mapping:
-        _convertor_mapping.update(default_mapping)
-
-    def _convertor(obj):
-        for original_type, convert_to in _convertor_mapping.items():
-            if isinstance(obj, original_type):
-                return convert_to(obj)
-        raise TypeError('We do not know how to convert {} of type {} for json serialization. Please pass the default_mapping parameter with proper mapping of the object to a basic python type.'.format(obj, type(obj)))
-
-    return _convertor
-
-
 def add_to_frozen_set(parents_ids, item_id):
     return parents_ids | {item_id}
 
@@ -257,6 +235,26 @@ def convert_item_or_items_into_set_else_none(items):
     return items
 
 
+def add_root_to_paths(paths):
+    """
+    Sometimes the users want to just pass
+    [key] instead of root[key] for example.
+    Here we automatically add all sorts of variations that might match
+    the path they were supposed to pass.
+    """
+    if paths is None:
+        return
+    result = OrderedSet()
+    for path in paths:
+        if path.startswith('root'):
+            result.add(path)
+        else:
+            result.add(f"root.{path}")
+            result.add(f"root[{path}]")
+            result.add(f"root['{path}']")
+    return result
+
+
 RE_COMPILED_TYPE = type(re.compile(''))
 
 
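A small illustration of the new helper; the inputs are arbitrary. Paths that already start with root are kept as-is, while bare keys are expanded into attribute, item, and quoted-item spellings so that whichever form the diff builds for a path can match.

    from deepdiff.helper import add_root_to_paths

    paths = add_root_to_paths(['ingredients', "root['for life']"])
    # Contains root.ingredients, root[ingredients], root['ingredients'],
    # plus root['for life'] unchanged; add_root_to_paths(None) returns None.
    print(paths)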
