4
4
static struct PyModuleDef _tokenizemodule ;
5
5
6
6
typedef struct {
7
- PyTypeObject * TokenizerIter ;
7
+ PyTypeObject * TokenizerIter ;
8
8
} tokenize_state ;
9
9
10
- static tokenize_state *
11
- get_tokenize_state (PyObject * module )
12
- {
13
- return (tokenize_state * )PyModule_GetState (module );
10
+ static tokenize_state *
11
+ get_tokenize_state (PyObject * module ) {
12
+ return (tokenize_state * )PyModule_GetState (module );
14
13
}
15
14
16
/* Resolve the module state from a heap type object by locating the module
   that defined the type (used by the Argument Clinic glue below). */
#define _tokenize_get_state_by_type(type) \
    get_tokenize_state(_PyType_GetModuleByDef(type, &_tokenizemodule))
18
17
19
18
#include "clinic/Python-tokenize.c.h"
@@ -24,9 +23,9 @@ class _tokenizer.tokenizeriter "tokenizeriterobject *" "_tokenize_get_state_by_t
24
23
[clinic start generated code]*/
25
24
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=96d98ee2fef7a8bc]*/
26
25
27
- typedef struct {
28
- PyObject_HEAD
29
- struct tok_state * tok ;
26
+ typedef struct
27
+ {
28
+ PyObject_HEAD struct tok_state * tok ;
30
29
} tokenizeriterobject ;
31
30
32
31
/*[clinic input]
@@ -40,27 +39,28 @@ static PyObject *
40
39
tokenizeriter_new_impl (PyTypeObject * type , const char * source )
41
40
/*[clinic end generated code: output=7fd9f46cf9263cbb input=4384b368407375c6]*/
42
41
{
43
- tokenizeriterobject * self = (tokenizeriterobject * )type -> tp_alloc (type , 0 );
42
+ tokenizeriterobject * self = (tokenizeriterobject * )type -> tp_alloc (type , 0 );
44
43
if (self == NULL ) {
45
44
return NULL ;
46
45
}
47
- PyObject * filename = PyUnicode_FromString ("<string>" );
46
+ PyObject * filename = PyUnicode_FromString ("<string>" );
48
47
if (filename == NULL ) {
49
48
return NULL ;
50
49
}
51
50
self -> tok = PyTokenizer_FromUTF8 (source , 1 );
52
51
if (self -> tok == NULL ) {
52
+ Py_DECREF (filename );
53
53
return NULL ;
54
54
}
55
55
self -> tok -> filename = filename ;
56
- return (PyObject * )self ;
56
+ return (PyObject * )self ;
57
57
}
58
58
59
- static PyObject *
60
- tokenizeriter_next (tokenizeriterobject * it )
59
+ static PyObject *
60
+ tokenizeriter_next (tokenizeriterobject * it )
61
61
{
62
- const char * start ;
63
- const char * end ;
62
+ const char * start ;
63
+ const char * end ;
64
64
int type = PyTokenizer_Get (it -> tok , & start , & end );
65
65
if (type == ERRORTOKEN && PyErr_Occurred ()) {
66
66
return NULL ;
@@ -69,23 +69,24 @@ tokenizeriter_next(tokenizeriterobject* it)
69
69
PyErr_SetString (PyExc_StopIteration , "EOF" );
70
70
return NULL ;
71
71
}
72
- PyObject * str = NULL ;
72
+ PyObject * str = NULL ;
73
73
if (start == NULL || end == NULL ) {
74
74
str = PyUnicode_FromString ("" );
75
- } else {
75
+ }
76
+ else {
76
77
str = PyUnicode_FromStringAndSize (start , end - start );
77
78
}
78
79
if (str == NULL ) {
79
80
return NULL ;
80
81
}
81
82
82
83
Py_ssize_t size = it -> tok -> inp - it -> tok -> buf ;
83
- PyObject * line = PyUnicode_DecodeUTF8 (it -> tok -> buf , size , "replace" );
84
+ PyObject * line = PyUnicode_DecodeUTF8 (it -> tok -> buf , size , "replace" );
84
85
if (line == NULL ) {
85
86
Py_DECREF (str );
86
87
return NULL ;
87
88
}
88
- const char * line_start = type == STRING ? it -> tok -> multi_line_start : it -> tok -> line_start ;
89
+ const char * line_start = type == STRING ? it -> tok -> multi_line_start : it -> tok -> line_start ;
89
90
int lineno = type == STRING ? it -> tok -> first_lineno : it -> tok -> lineno ;
90
91
int end_lineno = it -> tok -> lineno ;
91
92
int col_offset = -1 ;
@@ -101,41 +102,39 @@ tokenizeriter_next(tokenizeriterobject* it)
101
102
}
102
103
103
104
static void
104
- tokenizeriter_dealloc (tokenizeriterobject * it )
105
+ tokenizeriter_dealloc (tokenizeriterobject * it )
105
106
{
106
- PyTypeObject * tp = Py_TYPE (it );
107
+ PyTypeObject * tp = Py_TYPE (it );
107
108
PyTokenizer_Free (it -> tok );
108
109
tp -> tp_free (it );
109
110
Py_DECREF (tp );
110
111
}
111
112
112
113
static PyType_Slot tokenizeriter_slots [] = {
113
- {Py_tp_new , tokenizeriter_new },
114
- {Py_tp_dealloc , tokenizeriter_dealloc },
115
- {Py_tp_getattro , PyObject_GenericGetAttr },
116
- {Py_tp_iter , PyObject_SelfIter },
117
- {Py_tp_iternext , tokenizeriter_next },
118
- {0 , NULL },
114
+ {Py_tp_new , tokenizeriter_new },
115
+ {Py_tp_dealloc , tokenizeriter_dealloc },
116
+ {Py_tp_getattro , PyObject_GenericGetAttr },
117
+ {Py_tp_iter , PyObject_SelfIter },
118
+ {Py_tp_iternext , tokenizeriter_next },
119
+ {0 , NULL },
119
120
};
120
121
121
122
static PyType_Spec tokenizeriter_spec = {
122
- .name = "_tokenize.TokenizerIter" ,
123
- .basicsize = sizeof (tokenizeriterobject ),
124
- .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_IMMUTABLETYPE ),
125
- .slots = tokenizeriter_slots ,
123
+ .name = "_tokenize.TokenizerIter" ,
124
+ .basicsize = sizeof (tokenizeriterobject ),
125
+ .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_IMMUTABLETYPE ),
126
+ .slots = tokenizeriter_slots ,
126
127
};
127
128
128
-
129
129
static int
130
- tokenizemodule_exec (PyObject * m )
130
+ tokenizemodule_exec (PyObject * m )
131
131
{
132
- tokenize_state * state = get_tokenize_state (m );
132
+ tokenize_state * state = get_tokenize_state (m );
133
133
if (state == NULL ) {
134
134
return -1 ;
135
135
}
136
136
137
- state -> TokenizerIter = (PyTypeObject * )PyType_FromModuleAndSpec (
138
- m , & tokenizeriter_spec , NULL );
137
+ state -> TokenizerIter = (PyTypeObject * )PyType_FromModuleAndSpec (m , & tokenizeriter_spec , NULL );
139
138
if (state -> TokenizerIter == NULL ) {
140
139
return -1 ;
141
140
}
@@ -147,11 +146,11 @@ tokenizemodule_exec(PyObject* m)
147
146
}
148
147
149
148
static PyMethodDef tokenize_methods [] = {
150
- {NULL , NULL , 0 , NULL } /* Sentinel */
149
+ {NULL , NULL , 0 , NULL } /* Sentinel */
151
150
};
152
151
153
152
static PyModuleDef_Slot tokenizemodule_slots [] = {
154
- {Py_mod_exec , tokenizemodule_exec },
153
+ {Py_mod_exec , tokenizemodule_exec },
155
154
{0 , NULL }
156
155
};
157
156
@@ -178,14 +177,14 @@ tokenizemodule_free(void *m)
178
177
}
179
178
180
179
static struct PyModuleDef _tokenizemodule = {
181
- PyModuleDef_HEAD_INIT ,
182
- .m_name = "_tokenize" ,
183
- .m_size = sizeof (tokenize_state ),
184
- .m_slots = tokenizemodule_slots ,
185
- .m_methods = tokenize_methods ,
186
- .m_traverse = tokenizemodule_traverse ,
187
- .m_clear = tokenizemodule_clear ,
188
- .m_free = tokenizemodule_free ,
180
+ PyModuleDef_HEAD_INIT ,
181
+ .m_name = "_tokenize" ,
182
+ .m_size = sizeof (tokenize_state ),
183
+ .m_slots = tokenizemodule_slots ,
184
+ .m_methods = tokenize_methods ,
185
+ .m_traverse = tokenizemodule_traverse ,
186
+ .m_clear = tokenizemodule_clear ,
187
+ .m_free = tokenizemodule_free ,
189
188
};
190
189
191
190
PyMODINIT_FUNC
0 commit comments