Commit 2be98a2

fix(deps): upgrade sentence-splitter@3
1 parent 3dfb5f4 commit 2be98a2

3 files changed (+1440, -1553 lines)


package.json

Lines changed: 4 additions & 3 deletions
@@ -36,9 +36,10 @@
     "textlint-scripts": "^3.0.0"
   },
   "dependencies": {
-    "kuromojin": "^2.0.0",
-    "sentence-splitter": "^2.0.0",
+    "kuromojin": "^2.1.1",
+    "sentence-splitter": "^3.2.0",
     "structured-source": "^3.0.2",
-    "textlint-rule-helper": "^2.0.0"
+    "textlint-rule-helper": "^2.0.0",
+    "textlint-util-to-string": "^3.1.1"
   }
 }
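
The sentence-splitter major-version bump is what drives the source change below: instead of splitting the paragraph's raw text, the rule now calls the AST-based splitAST() and keeps only the resulting Sentence nodes, while the new textlint-util-to-string dependency provides the StringSource used further down to map plain-text offsets back to the original node. A minimal sketch of that v3-style call (the pickSentences helper name is illustrative, not part of the commit):

import { splitAST, Syntax as SentenceSyntax } from "sentence-splitter";

// Illustrative helper: given a textlint Paragraph node, return only its Sentence children.
// splitAST regroups the paragraph's children under Sentence nodes, and each Sentence
// keeps range/loc information that the rule later uses for reporting.
function pickSentences(paragraphNode) {
    const resultNode = splitAST(paragraphNode);
    return resultNode.children.filter(childNode => childNode.type === SentenceSyntax.Sentence);
}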

src/max-ten.js

Lines changed: 23 additions & 38 deletions
@@ -1,56 +1,42 @@
 // LICENSE : MIT
 "use strict";
-import {RuleHelper} from "textlint-rule-helper"
-import {getTokenizer} from "kuromojin";
-import {split as splitSentences} from "sentence-splitter";
-import Source from "structured-source";
+import { RuleHelper } from "textlint-rule-helper"
+import { getTokenizer } from "kuromojin";
+import { splitAST, Syntax as SentenceSyntax } from "sentence-splitter";
+import { StringSource } from "textlint-util-to-string";
+
 const defaultOptions = {
     max: 3, // 1文に利用できる最大の、の数
     strict: false // 例外ルールを適応するかどうか
 };
 
 function isSandwichedMeishi({
-                                before,
-                                token,
-                                after
-                            }) {
+    before,
+    token,
+    after
+}) {
     if (before === undefined || after === undefined || token === undefined) {
         return false;
     }
     return before.pos === "名詞" && after.pos === "名詞";
 }
-/**
- * add two positions.
- * note: line starts with 1, column starts with 0.
- * @param {Position} base
- * @param {Position} relative
- * @return {Position}
- */
-function addPositions(base, relative) {
-    return {
-        line: base.line + relative.line - 1, // line 1 + line 1 should be line 1
-        column: relative.line == 1 ? base.column + relative.column // when the same line
-            : relative.column // when another line
-    };
-}
+
 /**
  * @param {RuleContext} context
  * @param {object} [options]
  */
-module.exports = function(context, options = {}) {
+module.exports = function (context, options = {}) {
     const maxLen = options.max || defaultOptions.max;
     const isStrict = options.strict || defaultOptions.strict;
-    let helper = new RuleHelper(context);
-    let {Syntax, RuleError, report, getSource} = context;
+    const helper = new RuleHelper(context);
+    const { Syntax, RuleError, report, getSource } = context;
     return {
-        [Syntax.Paragraph](node){
+        [Syntax.Paragraph](node) {
             if (helper.isChildNode(node, [Syntax.BlockQuote])) {
                 return;
             }
-            let sentences = splitSentences(getSource(node), {
-                charRegExp: /[\?\!]/,
-                newLineCharacters: "\n\n"
-            });
+            const resultNode = splitAST(node);
+            const sentences = resultNode.children.filter(childNode => childNode.type === SentenceSyntax.Sentence);
             /*
                 <p>
                 <str><code><img><str>
@@ -65,10 +51,10 @@ module.exports = function(context, options = {}) {
             */
             return getTokenizer().then(tokenizer => {
                 sentences.forEach(sentence => {
-                    let text = sentence.value;
-                    let source = new Source(text);
+                    const source = new StringSource(sentence);
+                    const text = source.toString();
+                    const tokens = tokenizer.tokenizeForSentence(text);
                     let currentTenCount = 0;
-                    let tokens = tokenizer.tokenizeForSentence(text);
                     let lastToken = null;
                     tokens.forEach((token, index) => {
                         let surface = token.surface_form;
@@ -92,11 +78,10 @@ module.exports = function(context, options = {}) {
                         }
                         // report
                         if (currentTenCount >= maxLen) {
-                            let positionInSentence = source.indexToPosition(lastToken.word_position - 1);
-                            let positionInNode = addPositions(sentence.loc.start, positionInSentence);
-                            let ruleError = new context.RuleError(`一つの文で"、"を${maxLen}つ以上使用しています`, {
-                                line: positionInNode.line - 1,
-                                column: positionInNode.column
+                            const positionInSentence = source.originalIndexFromIndex(lastToken.word_position - 1);
+                            const index = sentence.range[0] + positionInSentence;
+                            const ruleError = new context.RuleError(`一つの文で"、"を${maxLen}つ以上使用しています`, {
+                                index
                             });
                             report(node, ruleError);
                             currentTenCount = 0;
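
For reference, a minimal sketch (not part of the commit) of how the new reporting flow above fits together: StringSource converts a Sentence node to plain text for kuromojin, and originalIndexFromIndex() maps an offset in that plain text back to an offset in the original sentence, which the rule then combines with the sentence's start offset for index-based reporting. The locateTen helper name is illustrative; it assumes a Sentence node as produced by splitAST:

import { getTokenizer } from "kuromojin";
import { StringSource } from "textlint-util-to-string";

// Illustrative helper: resolve the original-text offset of every "、" in one Sentence node.
function locateTen(sentence) {
    // StringSource flattens the sentence node to plain text and remembers the index mapping
    const source = new StringSource(sentence);
    const text = source.toString();
    return getTokenizer().then(tokenizer => {
        const indexes = [];
        tokenizer.tokenizeForSentence(text).forEach(token => {
            if (token.surface_form !== "、") {
                return;
            }
            // word_position is 1-based, so subtract 1 before mapping back to the original text
            const positionInSentence = source.originalIndexFromIndex(token.word_position - 1);
            // combine with the sentence's start offset, the same way the rule does above
            indexes.push(sentence.range[0] + positionInSentence);
        });
        return indexes;
    });
}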
