Commit b87a23b (parent 66900a7)

Rename convertor -> converter

crates/mbe/src/syntax_bridge.rs (22 additions, 22 deletions):

@@ -35,7 +35,7 @@ pub fn syntax_node_to_token_tree_with_modifications(
     append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
 ) -> (tt::Subtree, TokenMap, u32) {
     let global_offset = node.text_range().start();
-    let mut c = Convertor::new(node, global_offset, existing_token_map, next_id, replace, append);
+    let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append);
     let subtree = convert_tokens(&mut c);
     c.id_alloc.map.shrink_to_fit();
     always!(c.replace.is_empty(), "replace: {:?}", c.replace);
@@ -100,7 +100,7 @@ pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
         return None;
     }
 
-    let mut conv = RawConvertor {
+    let mut conv = RawConverter {
         lexed,
         pos: 0,
         id_alloc: TokenIdAlloc {
@@ -148,7 +148,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
     res
 }
 
-fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
+fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
     struct StackEntry {
         subtree: tt::Subtree,
         idx: usize,
@@ -425,8 +425,8 @@ impl TokenIdAlloc {
     }
 }
 
-/// A raw token (straight from lexer) convertor
-struct RawConvertor<'a> {
+/// A raw token (straight from lexer) converter
+struct RawConverter<'a> {
     lexed: parser::LexedStr<'a>,
     pos: usize,
     id_alloc: TokenIdAlloc,
@@ -442,7 +442,7 @@ trait SrcToken<Ctx>: std::fmt::Debug {
     fn synthetic_id(&self, ctx: &Ctx) -> Option<SyntheticTokenId>;
 }
 
-trait TokenConvertor: Sized {
+trait TokenConverter: Sized {
     type Token: SrcToken<Self>;
 
     fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>;
@@ -454,25 +454,25 @@ trait TokenConvertor: Sized {
     fn id_alloc(&mut self) -> &mut TokenIdAlloc;
 }
 
-impl<'a> SrcToken<RawConvertor<'a>> for usize {
-    fn kind(&self, ctx: &RawConvertor<'a>) -> SyntaxKind {
+impl<'a> SrcToken<RawConverter<'a>> for usize {
+    fn kind(&self, ctx: &RawConverter<'a>) -> SyntaxKind {
         ctx.lexed.kind(*self)
     }
 
-    fn to_char(&self, ctx: &RawConvertor<'a>) -> Option<char> {
+    fn to_char(&self, ctx: &RawConverter<'a>) -> Option<char> {
         ctx.lexed.text(*self).chars().next()
     }
 
-    fn to_text(&self, ctx: &RawConvertor<'_>) -> SmolStr {
+    fn to_text(&self, ctx: &RawConverter<'_>) -> SmolStr {
         ctx.lexed.text(*self).into()
     }
 
-    fn synthetic_id(&self, _ctx: &RawConvertor<'a>) -> Option<SyntheticTokenId> {
+    fn synthetic_id(&self, _ctx: &RawConverter<'a>) -> Option<SyntheticTokenId> {
         None
     }
 }
 
-impl<'a> TokenConvertor for RawConvertor<'a> {
+impl<'a> TokenConverter for RawConverter<'a> {
     type Token = usize;
 
     fn convert_doc_comment(&self, &token: &usize) -> Option<Vec<tt::TokenTree>> {
@@ -504,7 +504,7 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
     }
 }
 
-struct Convertor {
+struct Converter {
     id_alloc: TokenIdAlloc,
     current: Option<SyntaxToken>,
     current_synthetic: Vec<SyntheticToken>,
@@ -515,19 +515,19 @@ struct Convertor {
     punct_offset: Option<(SyntaxToken, TextSize)>,
 }
 
-impl Convertor {
+impl Converter {
     fn new(
         node: &SyntaxNode,
         global_offset: TextSize,
         existing_token_map: TokenMap,
         next_id: u32,
         mut replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
         mut append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
-    ) -> Convertor {
+    ) -> Converter {
         let range = node.text_range();
         let mut preorder = node.preorder_with_tokens();
         let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append);
-        Convertor {
+        Converter {
             id_alloc: { TokenIdAlloc { map: existing_token_map, global_offset, next_id } },
             current: first,
             current_synthetic: synthetic,
@@ -590,39 +590,39 @@ impl SynToken {
     }
 }
 
-impl SrcToken<Convertor> for SynToken {
-    fn kind(&self, _ctx: &Convertor) -> SyntaxKind {
+impl SrcToken<Converter> for SynToken {
+    fn kind(&self, _ctx: &Converter) -> SyntaxKind {
         match self {
             SynToken::Ordinary(token) => token.kind(),
             SynToken::Punch(token, _) => token.kind(),
             SynToken::Synthetic(token) => token.kind,
         }
     }
-    fn to_char(&self, _ctx: &Convertor) -> Option<char> {
+    fn to_char(&self, _ctx: &Converter) -> Option<char> {
         match self {
             SynToken::Ordinary(_) => None,
             SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
             SynToken::Synthetic(token) if token.text.len() == 1 => token.text.chars().next(),
             SynToken::Synthetic(_) => None,
         }
     }
-    fn to_text(&self, _ctx: &Convertor) -> SmolStr {
+    fn to_text(&self, _ctx: &Converter) -> SmolStr {
         match self {
             SynToken::Ordinary(token) => token.text().into(),
             SynToken::Punch(token, _) => token.text().into(),
             SynToken::Synthetic(token) => token.text.clone(),
         }
     }
 
-    fn synthetic_id(&self, _ctx: &Convertor) -> Option<SyntheticTokenId> {
+    fn synthetic_id(&self, _ctx: &Converter) -> Option<SyntheticTokenId> {
         match self {
             SynToken::Synthetic(token) => Some(token.id),
             _ => None,
         }
     }
 }
 
-impl TokenConvertor for Convertor {
+impl TokenConverter for Converter {
     type Token = SynToken;
     fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
         convert_doc_comment(token.token()?)
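
The renamed types form a small context-parameterized token abstraction: a `SrcToken<Ctx>` is a lightweight handle that resolves its kind and text through its converter context, which is how a plain `usize` (an index into `LexedStr`) can serve as `RawConverter`'s token while `Converter` walks real syntax tokens. Below is a minimal, self-contained sketch of that shape; the trait names mirror the diff, but the `Kind` enum, the fields of `RawConverter`, and the `bump`/`collect_idents` helpers are invented for illustration and are not rust-analyzer's actual definitions.

// Minimal sketch, NOT rust-analyzer's actual API: trait names match the
// diff, but `Kind`, the converter fields, and the helpers are invented here.

#[derive(Debug, Clone, Copy, PartialEq)]
enum Kind {
    Ident,
    Punct,
}

// A token is a cheap handle that resolves its data through a converter
// context `Ctx`; this is why a bare `usize` can act as a token below.
trait SrcToken<Ctx> {
    fn kind(&self, ctx: &Ctx) -> Kind;
    fn to_text(&self, ctx: &Ctx) -> String;
}

trait TokenConverter: Sized {
    type Token: SrcToken<Self>;
    /// Yield the next token, advancing the converter.
    fn bump(&mut self) -> Option<Self::Token>;
}

// Converter over raw lexer output, kept as parallel arrays for simplicity.
struct RawConverter {
    kinds: Vec<Kind>,
    texts: Vec<String>,
    pos: usize,
}

// The raw converter's token is just an index into the lexed buffers.
impl SrcToken<RawConverter> for usize {
    fn kind(&self, ctx: &RawConverter) -> Kind {
        ctx.kinds[*self]
    }
    fn to_text(&self, ctx: &RawConverter) -> String {
        ctx.texts[*self].clone()
    }
}

impl TokenConverter for RawConverter {
    type Token = usize;
    fn bump(&mut self) -> Option<usize> {
        if self.pos < self.kinds.len() {
            self.pos += 1;
            Some(self.pos - 1)
        } else {
            None
        }
    }
}

// A generic driver in the shape of `convert_tokens`: it sees only the
// `TokenConverter` interface, so the same loop works for any token source.
fn collect_idents<C: TokenConverter>(conv: &mut C) -> Vec<String> {
    let mut out = Vec::new();
    while let Some(tok) = conv.bump() {
        if tok.kind(conv) == Kind::Ident {
            out.push(tok.to_text(conv));
        }
    }
    out
}

fn main() {
    let mut conv = RawConverter {
        kinds: vec![Kind::Ident, Kind::Punct, Kind::Ident],
        texts: vec!["foo".into(), "!".into(), "bar".into()],
        pos: 0,
    };
    assert_eq!(collect_idents(&mut conv), vec!["foo", "bar"]);
}

The payoff of the pattern is visible in `convert_tokens<C: TokenConverter>` in the diff above: one generic traversal serves both the raw-lexer converter and the syntax-tree converter, and the rename only had to touch type names, not this structure.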
