Commit 214c2e5

Format the Python-tokenize module and fix exit path (GH-27935)
1 parent 33d95c6 commit 214c2e5
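The "fix exit path" half of this change is the added Py_DECREF(filename) in tokenizeriter_new_impl, visible in the diff below: when PyTokenizer_FromUTF8() fails, the filename string created just above it is now released instead of leaking on the early return (the formatting half mostly normalizes pointer spacing and brace placement). A minimal sketch of that error-path pattern, kept separate from the tokenizer internals; fallible_setup() and make_filename() are hypothetical stand-ins, not part of the module:

#include <Python.h>

/* Hypothetical stand-in for the call that can fail
   (PyTokenizer_FromUTF8 in the real constructor). */
static int
fallible_setup(void)
{
    PyErr_SetString(PyExc_RuntimeError, "setup failed");
    return -1;
}

/* Hypothetical constructor showing the exit-path rule: every strong
   reference acquired before a failing call must be dropped before the
   early return. */
static PyObject *
make_filename(void)
{
    PyObject *filename = PyUnicode_FromString("<string>");
    if (filename == NULL) {
        return NULL;
    }
    if (fallible_setup() < 0) {
        Py_DECREF(filename);   /* the fix: release it, do not leak it */
        return NULL;
    }
    return filename;           /* on success, hand the reference to the caller */
}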

Python/Python-tokenize.c

Lines changed: 46 additions & 47 deletions
@@ -4,16 +4,15 @@
 static struct PyModuleDef _tokenizemodule;
 
 typedef struct {
-    PyTypeObject* TokenizerIter;
+    PyTypeObject *TokenizerIter;
 } tokenize_state;
 
-static tokenize_state*
-get_tokenize_state(PyObject* module)
-{
-    return (tokenize_state*)PyModule_GetState(module);
+static tokenize_state *
+get_tokenize_state(PyObject *module) {
+    return (tokenize_state *)PyModule_GetState(module);
 }
 
-#define _tokenize_get_state_by_type(type) \
+#define _tokenize_get_state_by_type(type) \
     get_tokenize_state(_PyType_GetModuleByDef(type, &_tokenizemodule))
 
 #include "clinic/Python-tokenize.c.h"
@@ -24,9 +23,9 @@ class _tokenizer.tokenizeriter "tokenizeriterobject *" "_tokenize_get_state_by_t
 [clinic start generated code]*/
 /*[clinic end generated code: output=da39a3ee5e6b4b0d input=96d98ee2fef7a8bc]*/
 
-typedef struct {
-    PyObject_HEAD
-    struct tok_state* tok;
+typedef struct
+{
+    PyObject_HEAD struct tok_state *tok;
 } tokenizeriterobject;
 
 /*[clinic input]
@@ -40,27 +39,28 @@ static PyObject *
 tokenizeriter_new_impl(PyTypeObject *type, const char *source)
 /*[clinic end generated code: output=7fd9f46cf9263cbb input=4384b368407375c6]*/
 {
-    tokenizeriterobject* self = (tokenizeriterobject*)type->tp_alloc(type, 0);
+    tokenizeriterobject *self = (tokenizeriterobject *)type->tp_alloc(type, 0);
     if (self == NULL) {
         return NULL;
     }
-    PyObject* filename = PyUnicode_FromString("<string>");
+    PyObject *filename = PyUnicode_FromString("<string>");
     if (filename == NULL) {
         return NULL;
     }
     self->tok = PyTokenizer_FromUTF8(source, 1);
     if (self->tok == NULL) {
+        Py_DECREF(filename);
         return NULL;
     }
     self->tok->filename = filename;
-    return (PyObject*)self;
+    return (PyObject *)self;
 }
 
-static PyObject*
-tokenizeriter_next(tokenizeriterobject* it)
+static PyObject *
+tokenizeriter_next(tokenizeriterobject *it)
 {
-    const char* start;
-    const char* end;
+    const char *start;
+    const char *end;
     int type = PyTokenizer_Get(it->tok, &start, &end);
     if (type == ERRORTOKEN && PyErr_Occurred()) {
         return NULL;
@@ -69,23 +69,24 @@ tokenizeriter_next(tokenizeriterobject* it)
         PyErr_SetString(PyExc_StopIteration, "EOF");
         return NULL;
     }
-    PyObject* str = NULL;
+    PyObject *str = NULL;
     if (start == NULL || end == NULL) {
         str = PyUnicode_FromString("");
-    } else {
+    }
+    else {
         str = PyUnicode_FromStringAndSize(start, end - start);
     }
     if (str == NULL) {
         return NULL;
     }
 
     Py_ssize_t size = it->tok->inp - it->tok->buf;
-    PyObject* line = PyUnicode_DecodeUTF8(it->tok->buf, size, "replace");
+    PyObject *line = PyUnicode_DecodeUTF8(it->tok->buf, size, "replace");
     if (line == NULL) {
         Py_DECREF(str);
         return NULL;
     }
-    const char* line_start = type == STRING ? it->tok->multi_line_start : it->tok->line_start;
+    const char *line_start = type == STRING ? it->tok->multi_line_start : it->tok->line_start;
     int lineno = type == STRING ? it->tok->first_lineno : it->tok->lineno;
     int end_lineno = it->tok->lineno;
     int col_offset = -1;
@@ -101,41 +102,39 @@ tokenizeriter_next(tokenizeriterobject* it)
 }
 
 static void
-tokenizeriter_dealloc(tokenizeriterobject* it)
+tokenizeriter_dealloc(tokenizeriterobject *it)
 {
-    PyTypeObject* tp = Py_TYPE(it);
+    PyTypeObject *tp = Py_TYPE(it);
     PyTokenizer_Free(it->tok);
     tp->tp_free(it);
     Py_DECREF(tp);
 }
 
 static PyType_Slot tokenizeriter_slots[] = {
-    {Py_tp_new, tokenizeriter_new},
-    {Py_tp_dealloc, tokenizeriter_dealloc},
-    {Py_tp_getattro, PyObject_GenericGetAttr},
-    {Py_tp_iter, PyObject_SelfIter},
-    {Py_tp_iternext, tokenizeriter_next},
-    {0, NULL},
+    {Py_tp_new, tokenizeriter_new},
+    {Py_tp_dealloc, tokenizeriter_dealloc},
+    {Py_tp_getattro, PyObject_GenericGetAttr},
+    {Py_tp_iter, PyObject_SelfIter},
+    {Py_tp_iternext, tokenizeriter_next},
+    {0, NULL},
 };
 
 static PyType_Spec tokenizeriter_spec = {
-    .name = "_tokenize.TokenizerIter",
-    .basicsize = sizeof(tokenizeriterobject),
-    .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_IMMUTABLETYPE),
-    .slots = tokenizeriter_slots,
+    .name = "_tokenize.TokenizerIter",
+    .basicsize = sizeof(tokenizeriterobject),
+    .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_IMMUTABLETYPE),
+    .slots = tokenizeriter_slots,
 };
 
-
 static int
-tokenizemodule_exec(PyObject* m)
+tokenizemodule_exec(PyObject *m)
 {
-    tokenize_state* state = get_tokenize_state(m);
+    tokenize_state *state = get_tokenize_state(m);
     if (state == NULL) {
         return -1;
     }
 
-    state->TokenizerIter = (PyTypeObject *)PyType_FromModuleAndSpec(
-        m, &tokenizeriter_spec, NULL);
+    state->TokenizerIter = (PyTypeObject *)PyType_FromModuleAndSpec(m, &tokenizeriter_spec, NULL);
     if (state->TokenizerIter == NULL) {
         return -1;
     }
@@ -147,11 +146,11 @@ tokenizemodule_exec(PyObject* m)
 }
 
 static PyMethodDef tokenize_methods[] = {
-    {NULL, NULL, 0, NULL} /* Sentinel */
+    {NULL, NULL, 0, NULL} /* Sentinel */
 };
 
 static PyModuleDef_Slot tokenizemodule_slots[] = {
-    {Py_mod_exec, tokenizemodule_exec},
+    {Py_mod_exec, tokenizemodule_exec},
     {0, NULL}
 };
 
@@ -178,14 +177,14 @@ tokenizemodule_free(void *m)
 }
 
 static struct PyModuleDef _tokenizemodule = {
-    PyModuleDef_HEAD_INIT,
-    .m_name = "_tokenize",
-    .m_size = sizeof(tokenize_state),
-    .m_slots = tokenizemodule_slots,
-    .m_methods = tokenize_methods,
-    .m_traverse = tokenizemodule_traverse,
-    .m_clear = tokenizemodule_clear,
-    .m_free = tokenizemodule_free,
+    PyModuleDef_HEAD_INIT,
+    .m_name = "_tokenize",
+    .m_size = sizeof(tokenize_state),
+    .m_slots = tokenizemodule_slots,
+    .m_methods = tokenize_methods,
+    .m_traverse = tokenizemodule_traverse,
+    .m_clear = tokenizemodule_clear,
+    .m_free = tokenizemodule_free,
 };
 
 PyMODINIT_FUNC
