1 file changed, +3 −5 lines changed

@@ -81,8 +81,7 @@ def _tokenize(source):
     Returns only NAME tokens.
     """
     readline = _SourceReader(source).readline
-    filter_name = lambda token: token[0] == tokenize.NAME
-    return filter(filter_name, tokenize.generate_tokens(readline))
+    return filter(lambda token: token[0] == tokenize.NAME, tokenize.generate_tokens(readline))


 def _search_symbol(source, symbol):
@@ -108,9 +107,8 @@ def _search_symbol(source, symbol):
     symbol_tokens = list(_tokenize(symbol))
     source_tokens = list(_tokenize(source))

-    get_str = lambda token: token[1]
-    symbol_tokens_str = list(map(get_str, symbol_tokens))
-    source_tokens_str = list(map(get_str, source_tokens))
+    symbol_tokens_str = list(map(lambda token: token[1], symbol_tokens))
+    source_tokens_str = list(map(lambda token: token[1], source_tokens))

     symbol_len = len(symbol_tokens)
     locations = []
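
For context, a minimal self-contained sketch of the technique the patched _tokenize helper uses: filtering the output of tokenize.generate_tokens down to NAME tokens and reading each token's string via token[1]. The repository's _SourceReader is not shown in this diff, so io.StringIO stands in for it here; this is only an illustration, not the project's actual implementation.

import io
import tokenize


def name_tokens(source):
    """Yield only NAME tokens (identifiers and keywords) from a source string."""
    # io.StringIO is assumed here in place of the project's _SourceReader.
    readline = io.StringIO(source).readline
    return filter(lambda token: token[0] == tokenize.NAME,
                  tokenize.generate_tokens(readline))


if __name__ == "__main__":
    src = "result = compute(value) + 1\n"
    # token[1] holds the token's string, mirroring the map() calls in the diff.
    print([token[1] for token in name_tokens(src)])
    # -> ['result', 'compute', 'value']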