Skip to content

Commit e4fca4e

Browse files
committed
Fix tracking of token position
The line and offset were being dropped when entering into some sub-tokenizers.
1 parent baef3a4 commit e4fca4e

File tree

5 files changed

+34
-25
lines changed

5 files changed

+34
-25
lines changed

__tests__/__snapshots__/interpolant.js.snap

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -39,15 +39,15 @@ Array [
3939
"number",
4040
"10.00",
4141
2,
42-
10,
42+
3,
4343
2,
44-
14,
44+
7,
4545
],
4646
Array [
4747
"endInterpolant",
4848
"}",
4949
2,
50-
15,
50+
8,
5151
],
5252
Array [
5353
"newline",
@@ -66,15 +66,15 @@ Array [
6666
"number",
6767
".100",
6868
3,
69-
19,
7069
3,
71-
22,
70+
3,
71+
6,
7272
],
7373
Array [
7474
"endInterpolant",
7575
"}",
7676
3,
77-
23,
77+
7,
7878
],
7979
Array [
8080
"newline",
@@ -125,15 +125,15 @@ Array [
125125
"number",
126126
"123",
127127
2,
128-
8,
128+
3,
129129
2,
130-
10,
130+
5,
131131
],
132132
Array [
133133
"endInterpolant",
134134
"}",
135135
2,
136-
11,
136+
6,
137137
],
138138
Array [
139139
"newline",

src/tokenize-comment.js

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ let newline = '\n'.charCodeAt(0),
1515
asterisk = '*'.charCodeAt(0),
1616
wordEnd = /[ \n\t\r\(\)\{\},:;@!'"\\]|\*(?=\/)|#(?={)/g;
1717

18-
export default function tokenize(input, l, p) {
18+
export default function tokenize(input, l, p, o) {
1919
let tokens = [];
2020
let css = input.css.valueOf();
2121

@@ -24,7 +24,7 @@ export default function tokenize(input, l, p) {
2424
inInterpolant, inComment, inString;
2525

2626
let length = css.length;
27-
let offset = -1;
27+
let offset = o || -1;
2828
let line = l || 1;
2929
let pos = p || 0;
3030

@@ -102,9 +102,11 @@ export default function tokenize(input, l, p) {
102102
tokens.push(['startInterpolant', '#{', line, pos + 1 - offset]);
103103
next = pos + 1;
104104

105-
let { tokens: t, pos: p } = tokenizeInterpolant(input, line, next + 1);
105+
let { tokens: t, line: l, pos: p, offset: o } = tokenizeInterpolant(input, line, next + 1, offset);
106106
tokens = tokens.concat(t);
107107
next = p;
108+
line = l;
109+
offset = o;
108110

109111
pos = next;
110112
break;

src/tokenize-interpolant.js

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ let singleQuote = "'".charCodeAt(0),
3333
wordEnd = /[ \n\t\r\(\)\{\},:;@!'"\\]|\/(?=\*)|#(?={)/g,
3434
ident = /-?([a-z_]|\\[^\\])([a-z-_0-9]|\\[^\\])*/gi;
3535

36-
export default function tokenize(input, l, p) {
36+
export default function tokenize(input, l, p, o) {
3737
let tokens = [];
3838
let css = input.css.valueOf();
3939

@@ -42,7 +42,7 @@ export default function tokenize(input, l, p) {
4242
inInterpolant, inComment, inString;
4343

4444
let length = css.length;
45-
let offset = -1;
45+
let offset = o || -1;
4646
let line = l || 1;
4747
let pos = p || 0;
4848

@@ -135,9 +135,11 @@ export default function tokenize(input, l, p) {
135135
tokens.push([quote, quote, line, pos - offset]);
136136
next = pos + 1;
137137

138-
let { tokens: t, pos: p } = tokenizeString(input, line, next, quote);
138+
let { tokens: t, line: l, pos: p, offset: o } = tokenizeString(input, line, next, offset, quote);
139139
tokens = tokens.concat(t);
140140
next = p;
141+
line = l;
142+
offset = o;
141143

142144
pos = next;
143145
break;
@@ -178,7 +180,7 @@ export default function tokenize(input, l, p) {
178180
tokens.push(['startComment', '/*', line, pos + 1 - offset]);
179181
next = pos + 1;
180182

181-
let { tokens: t, line: l, pos: p, offset: o } = tokenizeComment(input, line, next + 1);
183+
let { tokens: t, line: l, pos: p, offset: o } = tokenizeComment(input, line, next + 1, offset);
182184
tokens = tokens.concat(t);
183185
next = p;
184186
line = l;
@@ -277,5 +279,5 @@ export default function tokenize(input, l, p) {
277279
pos++;
278280
}
279281

280-
return { tokens, pos };
282+
return { tokens, line, pos, offset };
281283
}

src/tokenize-string.js

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ let singleQuote = "'".charCodeAt(0),
1818
sQuoteEnd = /([.\s]*?)[^\\](?=((#{)|'))/gm,
1919
dQuoteEnd = /([.\s]*?)[^\\](?=((#{)|"))/gm;
2020

21-
export default function tokenize(input, l, p, quote) {
21+
export default function tokenize(input, l, p, o, quote) {
2222
let tokens = [];
2323
let css = input.css.valueOf();
2424

@@ -27,7 +27,7 @@ export default function tokenize(input, l, p, quote) {
2727
inInterpolant, inComment, inString;
2828

2929
let length = css.length;
30-
let offset = -1;
30+
let offset = o || -1;
3131
let line = l || 1;
3232
let pos = p || 0;
3333

@@ -81,12 +81,13 @@ export default function tokenize(input, l, p, quote) {
8181
tokens.push(['startInterpolant', '#{', line, pos + 1 - offset]);
8282
next = pos + 1;
8383

84-
let { tokens: t, pos: p } = tokenizeInterpolant(input, line, next + 1);
84+
let { tokens: t, line: l, pos: p, offset: o } = tokenizeInterpolant(input, line, next + 1, offset);
8585
tokens = tokens.concat(t);
8686
next = p;
87+
line = l;
88+
offset = o;
8789

8890
pos = next;
89-
9091
} else {
9192
quoteEnd.lastIndex = pos;
9293
quoteEnd.test(css);
@@ -111,5 +112,5 @@ export default function tokenize(input, l, p, quote) {
111112
pos++;
112113
}
113114

114-
return { tokens, pos };
115+
return { tokens, line, pos, offset };
115116
}

src/tokenize.js

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -139,9 +139,11 @@ export default function tokenize(input, l, p) {
139139
tokens.push([quote, quote, line, pos - offset]);
140140
next = pos + 1;
141141

142-
let { tokens: t, pos: p } = tokenizeString(input, line, next, quote);
142+
let { tokens: t, line: l, pos: p, offset: o } = tokenizeString(input, line, next, offset, quote);
143143
tokens = tokens.concat(t);
144144
next = p;
145+
line = l;
146+
offset = o;
145147

146148
pos = next;
147149
break;
@@ -182,7 +184,7 @@ export default function tokenize(input, l, p) {
182184
tokens.push(['startComment', '/*', line, pos + 1 - offset]);
183185
next = pos + 1;
184186

185-
let { tokens: t, line: l, pos: p, offset: o } = tokenizeComment(input, line, next + 1);
187+
let { tokens: t, line: l, pos: p, offset: o } = tokenizeComment(input, line, next + 1, offset);
186188
tokens = tokens.concat(t);
187189
next = p;
188190
line = l;
@@ -214,9 +216,11 @@ export default function tokenize(input, l, p) {
214216
tokens.push(['startInterpolant', '#{', line, pos + 1 - offset]);
215217
next = pos + 1;
216218

217-
let { tokens: t, pos: p } = tokenizeInterpolant(input, line, next + 1);
219+
let { tokens: t, line: l, pos: p, offset: o } = tokenizeInterpolant(input, line, next + 1, offset);
218220
tokens = tokens.concat(t);
219221
next = p;
222+
line = l;
223+
offset = o;
220224

221225
pos = next;
222226
break;

0 commit comments

Comments
 (0)