
Commit 63a462f

Switch to TryFrom
1 parent dc21510 commit 63a462f

13 files changed: +63 -53 lines

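Across the diff, the old TextSize::from_usize(n) constructor is replaced either by an explicit fallible conversion (TryFrom/TryInto, unwrapped, since source-text lengths fit into u32) or by TextSize::of(..) where the value really is the length of a concrete char or &str. A minimal sketch of both forms (the function below is hypothetical; the crate paths follow the re-exports used in the diff):

    use std::convert::TryInto;

    use ra_syntax::{TextRange, TextSize};

    fn char_ranges(text: &str) -> Vec<TextRange> {
        text.char_indices()
            .map(|(idx, c)| {
                // usize index -> TextSize via TryInto; the unwrap is fine for in-memory source text
                let offset: TextSize = idx.try_into().unwrap();
                // the length of a concrete char, instead of a bare magic number
                TextRange::at(offset, TextSize::of(c))
            })
            .collect()
    }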

crates/ra_assists/src/handlers/add_custom_impl.rs

Lines changed: 2 additions & 2 deletions

@@ -60,7 +60,6 @@ pub(crate) fn add_custom_impl(ctx: AssistCtx) -> Option<Assist> {
         .collect::<Vec<SmolStr>>();
     let has_more_derives = !new_attr_input.is_empty();
     let new_attr_input = new_attr_input.iter().sep_by(", ").surround_with("(", ")").to_string();
-    let new_attr_input_len = new_attr_input.len();

     let mut buf = String::new();
     buf.push_str("\n\nimpl ");
@@ -70,8 +69,9 @@ pub(crate) fn add_custom_impl(ctx: AssistCtx) -> Option<Assist> {
     buf.push_str(" {\n");

     let cursor_delta = if has_more_derives {
+        let delta = input.syntax().text_range().len() - TextSize::of(&new_attr_input);
         edit.replace(input.syntax().text_range(), new_attr_input);
-        input.syntax().text_range().len() - TextSize::from_usize(new_attr_input_len)
+        delta
     } else {
         let attr_range = attr.syntax().text_range();
         edit.delete(attr_range);
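
(Note: delta is now computed before edit.replace(..) consumes new_attr_input; the separate new_attr_input_len variable only existed to capture the length before that move, and TextSize::of(&new_attr_input) now measures the string directly.)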

crates/ra_assists/src/handlers/add_function.rs

Lines changed: 1 addition & 1 deletion

@@ -129,7 +129,7 @@ impl FunctionBuilder {
                 let fn_def = indent_once.increase_indent(fn_def);
                 let fn_def = ast::make::add_trailing_newlines(1, fn_def);
                 let fn_def = indent.increase_indent(fn_def);
-                (fn_def, it.syntax().text_range().start() + TextSize::from_usize(1))
+                (fn_def, it.syntax().text_range().start() + TextSize::of('{'))
             }
         };

crates/ra_assists/src/handlers/add_new.rs

Lines changed: 2 additions & 2 deletions

@@ -77,13 +77,13 @@ pub(crate) fn add_new(ctx: AssistCtx) -> Option<Assist> {
                 .text_range()
                 .end();

-            Some((start, TextSize::from_usize(1)))
+            Some((start, TextSize::of("\n")))
         })
         .unwrap_or_else(|| {
             buf = generate_impl_text(&strukt, &buf);
             let start = strukt.syntax().text_range().end();

-            (start, TextSize::from_usize(3))
+            (start, TextSize::of("\n}\n"))
         });

     edit.set_cursor(start_offset + TextSize::of(&buf) - end_offset);

crates/ra_assists/src/handlers/merge_match_arms.rs

Lines changed: 1 addition & 1 deletion

@@ -89,7 +89,7 @@ pub(crate) fn merge_match_arms(ctx: AssistCtx) -> Option<Assist> {

     edit.target(current_text_range);
     edit.set_cursor(match cursor_pos {
-        CursorPos::InExpr(back_offset) => start + TextSize::from_usize(arm.len()) - back_offset,
+        CursorPos::InExpr(back_offset) => start + TextSize::of(&arm) - back_offset,
         CursorPos::InPat(offset) => offset,
     });
     edit.replace(TextRange::new(start, end), arm);

crates/ra_ide_db/src/line_index.rs

Lines changed: 11 additions & 11 deletions

@@ -1,8 +1,9 @@
 //! `LineIndex` maps flat `TextSize` offsets into `(Line, Column)`
 //! representation.
+use std::iter;
+
 use ra_syntax::{TextRange, TextSize};
 use rustc_hash::FxHashMap;
-use std::iter;
 use superslice::Ext;

 #[derive(Clone, Debug, PartialEq, Eq)]
@@ -116,12 +117,11 @@ impl LineIndex {
         res
     }

-    fn utf16_to_utf8_col(&self, line: u32, col: u32) -> TextSize {
-        let mut col: TextSize = col.into();
+    fn utf16_to_utf8_col(&self, line: u32, mut col: u32) -> TextSize {
         if let Some(utf16_chars) = self.utf16_lines.get(&line) {
             for c in utf16_chars {
-                if col >= c.start {
-                    col += c.len() - TextSize::from_usize(1);
+                if col >= u32::from(c.start) {
+                    col += u32::from(c.len()) - 1;
                 } else {
                     // From here on, all utf16 characters come *after* the character we are mapping,
                     // so we don't need to take them into account
@@ -130,12 +130,12 @@ impl LineIndex {
             }
         }

-        col
+        col.into()
     }
 }

 #[cfg(test)]
-mod test_line_index {
+mod tests {
     use super::*;

     #[test]
@@ -224,12 +224,12 @@ const C: char = \"メ メ\";
         assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15);

         // UTF-16 to UTF-8
-        assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from_usize(15));
+        assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));

-        assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from_usize(20));
-        assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from_usize(23));
+        assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from(20));
+        assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(23));

-        assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from_usize(15));
+        assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15));
     }

     #[test]
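
(The adjustment col += u32::from(c.len()) - 1 widens the column by the difference between a character's UTF-8 length and the single UTF-16 unit it occupies here. Assuming line 1 of the test text contains the two 3-byte メ characters shown in the hunk header, the expected values follow: 18 + (3 - 1) = 20 past the first one, 19 + (3 - 1) + (3 - 1) = 23 past both, and column 15, which precedes both, stays unchanged.)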

crates/ra_ide_db/src/line_index_utils.rs

Lines changed: 7 additions & 4 deletions

@@ -7,6 +7,8 @@
 //! Code in this module applies this "to (Line, Column) after edit"
 //! transformation.

+use std::convert::TryInto;
+
 use ra_syntax::{TextRange, TextSize};
 use ra_text_edit::{AtomTextEdit, TextEdit};

@@ -139,14 +141,15 @@ impl Iterator for OffsetStepIter<'_> {
             .text
             .char_indices()
             .filter_map(|(i, c)| {
+                let i: TextSize = i.try_into().unwrap();
+                let char_len = TextSize::of(c);
                 if c == '\n' {
-                    let next_offset = self.offset + TextSize::from_usize(i + 1);
+                    let next_offset = self.offset + i + char_len;
                     let next = Step::Newline(next_offset);
                     Some((next, next_offset))
                 } else {
-                    let char_len = TextSize::of(c);
-                    if char_len > TextSize::from_usize(1) {
-                        let start = self.offset + TextSize::from_usize(i);
+                    if !c.is_ascii() {
+                        let start = self.offset + i;
                         let end = start + char_len;
                         let next = Step::Utf16Char(TextRange::new(start, end));
                         let next_offset = end;
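
(The new !c.is_ascii() test replaces char_len > TextSize::from_usize(1): a char takes more than one UTF-8 byte exactly when it is not ASCII. A throwaway equivalence check over an arbitrary sample string:)

    use ra_syntax::TextSize;

    fn main() {
        for c in "a\nß→🦀".chars() {
            assert_eq!(TextSize::of(c) > TextSize::from(1), !c.is_ascii());
        }
    }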

crates/ra_ide_db/src/search.rs

Lines changed: 2 additions & 2 deletions

@@ -4,7 +4,7 @@
 //! get a super-set of matches. Then, we we confirm each match using precise
 //! name resolution.

-use std::mem;
+use std::{convert::TryInto, mem};

 use hir::{DefWithBody, HasSource, Module, ModuleSource, Semantics, Visibility};
 use once_cell::unsync::Lazy;
@@ -207,7 +207,7 @@ impl Definition {
             let tree = Lazy::new(|| sema.parse(file_id).syntax().clone());

             for (idx, _) in text.match_indices(pat) {
-                let offset = TextSize::from_usize(idx);
+                let offset: TextSize = idx.try_into().unwrap();
                 if !search_range.contains_inclusive(offset) {
                     tested_by!(search_filters_by_range; force);
                     continue;

crates/ra_mbe/src/syntax_bridge.rs

Lines changed: 6 additions & 6 deletions

@@ -516,7 +516,7 @@ impl TokenConvertor for Convertor {
     fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
         if let Some((punct, offset)) = self.punct_offset.clone() {
             if usize::from(offset) + 1 < punct.text().len() {
-                let offset = offset + TextSize::from_usize(1);
+                let offset = offset + TextSize::of('.');
                 let range = punct.text_range();
                 self.punct_offset = Some((punct.clone(), offset));
                 let range = TextRange::at(range.start() + offset, TextSize::of('.'));
@@ -532,9 +532,9 @@ impl TokenConvertor for Convertor {

         let token = if curr.kind().is_punct() {
             let range = curr.text_range();
-            let range = TextRange::at(range.start(), TextSize::from_usize(1));
-            self.punct_offset = Some((curr.clone(), TextSize::from_usize(0)));
-            (SynToken::Punch(curr, TextSize::from_usize(0)), range)
+            let range = TextRange::at(range.start(), TextSize::of('.'));
+            self.punct_offset = Some((curr.clone(), 0.into()));
+            (SynToken::Punch(curr, 0.into()), range)
         } else {
             self.punct_offset = None;
             let range = curr.text_range();
@@ -546,7 +546,7 @@ impl TokenConvertor for Convertor {

     fn peek(&self) -> Option<Self::Token> {
         if let Some((punct, mut offset)) = self.punct_offset.clone() {
-            offset = offset + TextSize::from_usize(1);
+            offset = offset + TextSize::of('.');
             if usize::from(offset) < punct.text().len() {
                 return Some(SynToken::Punch(punct, offset));
             }
@@ -558,7 +558,7 @@ impl TokenConvertor for Convertor {
         }

         let token = if curr.kind().is_punct() {
-            SynToken::Punch(curr, TextSize::from_usize(0))
+            SynToken::Punch(curr, 0.into())
         } else {
             SynToken::Ordiniary(curr)
         };
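
(The punctuation tokens split here are single ASCII characters, so TextSize::of('.') serves as a self-describing one-byte length in place of the literal 1, and the zero starting offsets become 0.into().)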

crates/ra_syntax/src/ast/tokens.rs

Lines changed: 6 additions & 8 deletions

@@ -1,5 +1,7 @@
 //! There are many AstNodes, but only a few tokens, so we hand-write them here.

+use std::convert::{TryFrom, TryInto};
+
 use crate::{
     ast::{AstToken, Comment, RawString, String, Whitespace},
     TextRange, TextSize,
@@ -95,8 +97,8 @@ impl QuoteOffsets {
         }

         let start = TextSize::from(0);
-        let left_quote = TextSize::from_usize(left_quote) + TextSize::of('"');
-        let right_quote = TextSize::from_usize(right_quote);
+        let left_quote = TextSize::try_from(left_quote).unwrap() + TextSize::of('"');
+        let right_quote = TextSize::try_from(right_quote).unwrap();
         let end = TextSize::of(literal);

         let res = QuoteOffsets {
@@ -498,7 +500,7 @@ impl HasFormatSpecifier for String {
         let mut res = Vec::with_capacity(text.len());
         rustc_lexer::unescape::unescape_str(text, &mut |range, unescaped_char| {
             res.push((
-                TextRange::new(TextSize::from_usize(range.start), TextSize::from_usize(range.end))
+                TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap())
                     + offset,
                 unescaped_char,
             ))
@@ -518,11 +520,7 @@ impl HasFormatSpecifier for RawString {

         let mut res = Vec::with_capacity(text.len());
         for (idx, c) in text.char_indices() {
-            res.push((
-                TextRange::new(TextSize::from_usize(idx), TextSize::from_usize(idx + c.len_utf8()))
-                    + offset,
-                Ok(c),
-            ));
+            res.push((TextRange::at(idx.try_into().unwrap(), TextSize::of(c)) + offset, Ok(c)));
         }
         Some(res)
     }
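
(TextRange::at(start, len) is the offset-plus-length constructor, interchangeable with the old TextRange::new(start, end) form whenever end == start + len; TextSize::of(c) supplies the same UTF-8 length that idx + c.len_utf8() encoded before. A small illustration with arbitrary values:)

    use ra_syntax::{TextRange, TextSize};

    fn main() {
        let start = TextSize::from(10);
        let len = TextSize::of('メ'); // 3 UTF-8 bytes
        assert_eq!(TextRange::at(start, len), TextRange::new(start, start + len));
    }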

crates/ra_syntax/src/fuzz.rs

Lines changed: 8 additions & 3 deletions

@@ -1,8 +1,13 @@
 //! FIXME: write short doc here

-use crate::{validation, AstNode, SourceFile, TextRange, TextSize};
+use std::{
+    convert::TryInto,
+    str::{self, FromStr},
+};
+
 use ra_text_edit::AtomTextEdit;
-use std::str::{self, FromStr};
+
+use crate::{validation, AstNode, SourceFile, TextRange};

 fn check_file_invariants(file: &SourceFile) {
     let root = file.syntax();
@@ -35,7 +40,7 @@ impl CheckReparse {
         let text = format!("{}{}{}", PREFIX, text, SUFFIX);
         text.get(delete_start..delete_start.checked_add(delete_len)?)?; // make sure delete is a valid range
         let delete =
-            TextRange::at(TextSize::from_usize(delete_start), TextSize::from_usize(delete_len));
+            TextRange::at(delete_start.try_into().unwrap(), delete_len.try_into().unwrap());
         let edited_text =
             format!("{}{}{}", &text[..delete_start], &insert, &text[delete_start + delete_len..]);
         let edit = AtomTextEdit { delete, insert };

crates/ra_syntax/src/parsing/lexer.rs

Lines changed: 13 additions & 11 deletions

@@ -1,6 +1,8 @@
 //! Lexer analyzes raw input string and produces lexemes (tokens).
 //! It is just a bridge to `rustc_lexer`.

+use std::convert::TryInto;
+
 use crate::{
     SyntaxError,
     SyntaxKind::{self, *},
@@ -28,18 +30,19 @@ pub fn tokenize(text: &str) -> (Vec<Token>, Vec<SyntaxError>) {
     let mut tokens = Vec::new();
     let mut errors = Vec::new();

-    let mut offset: usize = rustc_lexer::strip_shebang(text)
-        .map(|shebang_len| {
-            tokens.push(Token { kind: SHEBANG, len: TextSize::from_usize(shebang_len) });
+    let mut offset = match rustc_lexer::strip_shebang(text) {
+        Some(shebang_len) => {
+            tokens.push(Token { kind: SHEBANG, len: shebang_len.try_into().unwrap() });
             shebang_len
-        })
-        .unwrap_or(0);
+        }
+        None => 0,
+    };

     let text_without_shebang = &text[offset..];

     for rustc_token in rustc_lexer::tokenize(text_without_shebang) {
-        let token_len = TextSize::from_usize(rustc_token.len);
-        let token_range = TextRange::at(TextSize::from_usize(offset), token_len);
+        let token_len: TextSize = rustc_token.len.try_into().unwrap();
+        let token_range = TextRange::at(offset.try_into().unwrap(), token_len);

         let (syntax_kind, err_message) =
             rustc_token_kind_to_syntax_kind(&rustc_token.kind, &text[token_range]);
@@ -96,10 +99,9 @@ fn lex_first_token(text: &str) -> Option<(Token, Option<SyntaxError>)> {
     let rustc_token = rustc_lexer::first_token(text);
     let (syntax_kind, err_message) = rustc_token_kind_to_syntax_kind(&rustc_token.kind, text);

-    let token = Token { kind: syntax_kind, len: TextSize::from_usize(rustc_token.len) };
-    let optional_error = err_message.map(|err_message| {
-        SyntaxError::new(err_message, TextRange::new(0.into(), TextSize::of(text)))
-    });
+    let token = Token { kind: syntax_kind, len: rustc_token.len.try_into().unwrap() };
+    let optional_error = err_message
+        .map(|err_message| SyntaxError::new(err_message, TextRange::up_to(TextSize::of(text))));

     Some((token, optional_error))
 }
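
(TextRange::up_to(end) is the prefix range starting at zero, equivalent to the old TextRange::new(0.into(), end); a quick sketch:)

    use ra_syntax::{TextRange, TextSize};

    fn main() {
        let end = TextSize::of("fn main() {}");
        assert_eq!(TextRange::up_to(end), TextRange::new(0.into(), end));
    }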

crates/ra_syntax/src/tests.rs

Lines changed: 1 addition & 1 deletion

@@ -121,7 +121,7 @@ fn assert_errors_are_absent(errors: &[SyntaxError], path: &Path) {

 fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str) -> String {
     let mut acc = String::new();
-    let mut offset = TextSize::from_usize(0);
+    let mut offset: TextSize = 0.into();
     for token in tokens {
         let token_len = token.len;
         let token_text = &text[TextRange::at(offset, token.len)];

crates/ra_syntax/src/validation.rs

Lines changed: 3 additions & 1 deletion

@@ -2,6 +2,8 @@

 mod block;

+use std::convert::TryFrom;
+
 use rustc_lexer::unescape;

 use crate::{
@@ -112,7 +114,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {

     // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366199205)
     let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {
-        let off = token.text_range().start() + TextSize::from_usize(off + prefix_len);
+        let off = token.text_range().start() + TextSize::try_from(off + prefix_len).unwrap();
         acc.push(SyntaxError::new_at_offset(rustc_unescape_error_to_string(err), off));
     };
