@@ -103,13 +103,13 @@ def SubLoader(loader):  # type: (Loader) -> Loader
                   skip_schemas=loader.skip_schemas)


 class Fetcher(object):
-    def fetch_text(self, url):  # type: (unicode) -> unicode
+    def fetch_text(self, url):  # type: (Text) -> Text
         raise NotImplementedError()

-    def check_exists(self, url):  # type: (unicode) -> bool
+    def check_exists(self, url):  # type: (Text) -> bool
         raise NotImplementedError()

-    def urljoin(self, base_url, url):  # type: (unicode, unicode) -> unicode
+    def urljoin(self, base_url, url):  # type: (Text, Text) -> Text
         raise NotImplementedError()

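For context on the annotation change in this hunk: `Text` comes from the standard `typing` module and aliases `unicode` on Python 2 and `str` on Python 3, which matters because the name `unicode` does not exist on Python 3. The following is a minimal sketch, not part of the patch, of a concrete fetcher written against the `Fetcher` interface shown above; the `DictFetcher` name and its in-memory mapping are hypothetical.

from typing import Dict, Text  # Text is unicode on Py2, str on Py3

from six.moves.urllib.parse import urljoin as _urljoin

from schema_salad.ref_resolver import Fetcher


class DictFetcher(Fetcher):
    """Serve documents from an in-memory mapping (illustration only)."""

    def __init__(self, documents):
        # type: (Dict[Text, Text]) -> None
        self.documents = documents

    def fetch_text(self, url):
        # type: (Text) -> Text
        return self.documents[url]

    def check_exists(self, url):
        # type: (Text) -> bool
        return url in self.documents

    def urljoin(self, base_url, url):
        # type: (Text, Text) -> Text
        # Delegate to the stdlib join; a real fetcher might add scheme checks.
        return _urljoin(base_url, url)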
@@ -122,7 +122,7 @@ def __init__(self,
         self.session = session

     def fetch_text(self, url):
-        # type: (unicode) -> unicode
+        # type: (Text) -> Text
         if url in self.cache:
             return self.cache[url]

@@ -152,7 +152,7 @@ def fetch_text(self, url):
         else:
             raise ValueError('Unsupported scheme in url: %s' % url)

-    def check_exists(self, url):  # type: (unicode) -> bool
+    def check_exists(self, url):  # type: (Text) -> bool
         if url in self.cache:
             return True

@@ -178,9 +178,9 @@ class Loader(object):
     def __init__(self,
                  ctx,  # type: ContextType
                  schemagraph=None,  # type: rdflib.graph.Graph
-                 foreign_properties=None,  # type: Set[unicode]
-                 idx=None,  # type: Dict[unicode, Union[CommentedMap, CommentedSeq, unicode, None]]
-                 cache=None,  # type: Dict[unicode, Any]
+                 foreign_properties=None,  # type: Set[Text]
+                 idx=None,  # type: Dict[Text, Union[CommentedMap, CommentedSeq, Text, None]]
+                 cache=None,  # type: Dict[Text, Any]
                  session=None,  # type: requests.sessions.Session
                  fetcher_constructor=None,  # type: Callable[[Dict[unicode, unicode], requests.sessions.Session], Fetcher]
                  skip_schemas=None  # type: bool
@@ -239,29 +239,29 @@ def __init__(self,
         self.fetch_text = self.fetcher.fetch_text
         self.check_exists = self.fetcher.check_exists

-        self.url_fields = set()  # type: Set[unicode]
-        self.scoped_ref_fields = {}  # type: Dict[unicode, int]
-        self.vocab_fields = set()  # type: Set[unicode]
-        self.identifiers = []  # type: List[unicode]
-        self.identity_links = set()  # type: Set[unicode]
-        self.standalone = None  # type: Optional[Set[unicode]]
-        self.nolinkcheck = set()  # type: Set[unicode]
-        self.vocab = {}  # type: Dict[unicode, unicode]
-        self.rvocab = {}  # type: Dict[unicode, unicode]
-        self.idmap = {}  # type: Dict[unicode, Any]
-        self.mapPredicate = {}  # type: Dict[unicode, unicode]
-        self.type_dsl_fields = set()  # type: Set[unicode]
+        self.url_fields = set()  # type: Set[Text]
+        self.scoped_ref_fields = {}  # type: Dict[Text, int]
+        self.vocab_fields = set()  # type: Set[Text]
+        self.identifiers = []  # type: List[Text]
+        self.identity_links = set()  # type: Set[Text]
+        self.standalone = None  # type: Optional[Set[Text]]
+        self.nolinkcheck = set()  # type: Set[Text]
+        self.vocab = {}  # type: Dict[Text, Text]
+        self.rvocab = {}  # type: Dict[Text, Text]
+        self.idmap = {}  # type: Dict[Text, Any]
+        self.mapPredicate = {}  # type: Dict[Text, Text]
+        self.type_dsl_fields = set()  # type: Set[Text]

         self.add_context(ctx)

     def expand_url(self,
-                   url,  # type: unicode
-                   base_url,  # type: unicode
+                   url,  # type: Text
+                   base_url,  # type: Text
                    scoped_id=False,  # type: bool
                    vocab_term=False,  # type: bool
                    scoped_ref=None  # type: int
                    ):
-        # type: (...) -> unicode
+        # type: (...) -> Text
         if url in (u"@id", u"@type"):
             return url

@@ -298,7 +298,7 @@ def expand_url(self,
         else:
             return url

-    def _add_properties(self, s):  # type: (unicode) -> None
+    def _add_properties(self, s):  # type: (Text) -> None
         for _, _, rng in self.graph.triples((s, RDFS.range, None)):
             literal = ((six.text_type(rng).startswith(
                 u"http://www.w3.org/2001/XMLSchema#") and
@@ -309,11 +309,11 @@ def _add_properties(self, s):  # type: (unicode) -> None
                 self.url_fields.add(six.text_type(s))
         self.foreign_properties.add(six.text_type(s))

-    def add_namespaces(self, ns):  # type: (Dict[unicode, unicode]) -> None
+    def add_namespaces(self, ns):  # type: (Dict[Text, Text]) -> None
         self.vocab.update(ns)

     def add_schemas(self, ns, base_url):
-        # type: (Union[List[unicode], unicode], unicode) -> None
+        # type: (Union[List[Text], Text], Text) -> None
         if self.skip_schemas:
             return
         for sch in aslist(ns):
@@ -348,7 +348,7 @@ def add_schemas(self, ns, base_url):
             self.idx[six.text_type(s)] = None

     def add_context(self, newcontext, baseuri=""):
-        # type: (ContextType, unicode) -> None
+        # type: (ContextType, Text) -> None
         if bool(self.vocab):
             raise validate.ValidationException(
                 "Refreshing context that already has stuff in it")
@@ -413,17 +413,17 @@ def add_context(self, newcontext, baseuri=""):
         _logger.debug("vocab is %s", self.vocab)

     def resolve_ref(self,
-                    ref,  # type: Union[CommentedMap, CommentedSeq, unicode]
-                    base_url=None,  # type: unicode
+                    ref,  # type: Union[CommentedMap, CommentedSeq, Text]
+                    base_url=None,  # type: Text
                     checklinks=True  # type: bool
                     ):
-        # type: (...) -> Tuple[Union[CommentedMap, CommentedSeq, unicode, None], Dict[unicode, Any]]
+        # type: (...) -> Tuple[Union[CommentedMap, CommentedSeq, Text, None], Dict[Text, Any]]

-        lref = ref  # type: Union[CommentedMap, CommentedSeq, unicode, None]
+        lref = ref  # type: Union[CommentedMap, CommentedSeq, Text, None]
         obj = None  # type: Optional[CommentedMap]
-        resolved_obj = None  # type: Optional[Union[CommentedMap, CommentedSeq, unicode]]
+        resolved_obj = None  # type: Optional[Union[CommentedMap, CommentedSeq, Text]]
         inc = False
-        mixin = None  # type: Optional[Dict[unicode, Any]]
+        mixin = None  # type: Optional[Dict[Text, Any]]

         if not base_url:
             base_url = file_uri(os.getcwd()) + "/"
@@ -581,10 +581,10 @@ def _resolve_idmap(self,
     typeDSLregex = re.compile(ur"^([^[?]+)(\[\])?(\?)?$")

     def _type_dsl(self,
-                  t,  # type: Union[unicode, Dict, List]
+                  t,  # type: Union[Text, Dict, List]
                   lc,
                   filename):
-        # type: (...) -> Union[unicode, Dict[unicode, unicode], List[Union[unicode, Dict[unicode, unicode]]]]
+        # type: (...) -> Union[Text, Dict[Text, Text], List[Union[Text, Dict[Text, Text]]]]

         if not isinstance(t, (str, six.text_type)):
             return t
@@ -627,7 +627,7 @@ def _resolve_type_dsl(self,
                             t, datum.lc.data[n], document.lc.filename))
                 if isinstance(datum2, CommentedSeq):
                     datum3 = CommentedSeq()
-                    seen = []  # type: List[unicode]
+                    seen = []  # type: List[Text]
                     for i, item in enumerate(datum2):
                         if isinstance(item, CommentedSeq):
                             for j, v in enumerate(item):
@@ -647,7 +647,7 @@ def _resolve_type_dsl(self,
                 document[d] = datum2

     def _resolve_identifier(self, document, loader, base_url):
-        # type: (CommentedMap, Loader, unicode) -> unicode
+        # type: (CommentedMap, Loader, Text) -> Text
         # Expand identifier field (usually 'id') to resolve scope
         for identifer in loader.identifiers:
             if identifer in document:
@@ -666,7 +666,7 @@ def _resolve_identifier(self, document, loader, base_url):
         return base_url

     def _resolve_identity(self, document, loader, base_url):
-        # type: (Dict[unicode, List[unicode]], Loader, unicode) -> None
+        # type: (Dict[Text, List[Text]], Loader, Text) -> None
         # Resolve scope for identity fields (fields where the value is the
         # identity of a standalone node, such as enum symbols)
         for identifer in loader.identity_links:
@@ -680,7 +680,7 @@ def _resolve_identity(self, document, loader, base_url):
                             n]] = document[identifer][n]

     def _normalize_fields(self, document, loader):
-        # type: (Dict[unicode, unicode], Loader) -> None
+        # type: (Dict[Text, Text], Loader) -> None
         # Normalize fields which are prefixed or full URIn to vocabulary terms
         for d in document:
             d2 = loader.expand_url(d, u"", scoped_id=False, vocab_term=True)
@@ -689,9 +689,9 @@ def _normalize_fields(self, document, loader):
                 del document[d]

     def _resolve_uris(self,
-                      document,  # type: Dict[unicode, Union[unicode, List[unicode]]]
+                      document,  # type: Dict[Text, Union[Text, List[Text]]]
                       loader,  # type: Loader
-                      base_url  # type: unicode
+                      base_url  # type: Text
                       ):
         # type: (...) -> None
         # Resolve remaining URLs based on document base
@@ -714,11 +714,11 @@ def _resolve_uris(self,

     def resolve_all(self,
                     document,  # type: Union[CommentedMap, CommentedSeq]
-                    base_url,  # type: unicode
-                    file_base=None,  # type: unicode
+                    base_url,  # type: Text
+                    file_base=None,  # type: Text
                     checklinks=True  # type: bool
                     ):
-        # type: (...) -> Tuple[Union[CommentedMap, CommentedSeq, unicode, None], Dict[unicode, Any]]
+        # type: (...) -> Tuple[Union[CommentedMap, CommentedSeq, Text, None], Dict[Text, Any]]
         loader = self
         metadata = CommentedMap()  # type: CommentedMap
         if file_base is None:
@@ -840,7 +840,7 @@ def resolve_all(self,

         return document, metadata

-    def fetch(self, url, inject_ids=True):  # type: (unicode, bool) -> Any
+    def fetch(self, url, inject_ids=True):  # type: (Text, bool) -> Any
         if url in self.idx:
             return self.idx[url]
         try:
@@ -868,7 +868,7 @@ def fetch(self, url, inject_ids=True):  # type: (unicode, bool) -> Any
     FieldType = TypeVar('FieldType', six.text_type, CommentedSeq, CommentedMap)

     def validate_scoped(self, field, link, docid):
-        # type: (unicode, unicode, unicode) -> unicode
+        # type: (Text, Text, Text) -> Text
         split = urlparse.urlsplit(docid)
         sp = split.fragment.split(u"/")
         n = self.scoped_ref_fields[field]
@@ -892,7 +892,7 @@ def validate_scoped(self, field, link, docid):
             "Field `%s` references unknown identifier `%s`, tried %s" % (field, link, ", ".join(tried)))

     def validate_link(self, field, link, docid, all_doc_ids):
-        # type: (unicode, FieldType, unicode, Dict[Text, Text]) -> FieldType
+        # type: (Text, FieldType, Text, Dict[Text, Text]) -> FieldType
         if field in self.nolinkcheck:
             return link
         if isinstance(link, (str, six.text_type)):
@@ -939,7 +939,7 @@ def getid(self, d):  # type: (Any) -> Optional[Text]
         return None

     def validate_links(self, document, base_url, all_doc_ids):
-        # type: (Union[CommentedMap, CommentedSeq, unicode, None], unicode, Dict[Text, Text]) -> None
+        # type: (Union[CommentedMap, CommentedSeq, Text, None], Text, Dict[Text, Text]) -> None
         docid = self.getid(document)
         if not docid:
             docid = base_url
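A caller-side sketch of the updated `Loader` signatures, assuming a minimal context mapping; the context keys and the `document.yml` file name are illustrative (the file must exist for the call to succeed) and are not taken from the repository's tests.

from schema_salad.ref_resolver import Loader

# ctx is an ordinary ContextType mapping; mapping "id" to "@id" marks it as
# the identifier field (illustrative context, not from the patch).
loader = Loader({u"id": u"@id"})

# resolve_ref now advertises
#   Tuple[Union[CommentedMap, CommentedSeq, Text, None], Dict[Text, Any]]
# so both elements unpack the same way under Python 2 and Python 3.
document, metadata = loader.resolve_ref(u"document.yml")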