forked from FoundKeyGang/FoundKey
[MFM] Better MFM parsing
parent 9b23ebd4a3
commit fe707f88a4
3 changed files with 25 additions and 47 deletions
@@ -26,45 +26,6 @@ export default (source: string): Node[] => {
     nodes = concatText(nodes);
     concatTextRecursive(nodes);
 
-    function getBeforeTextNode(node: Node): Node {
-        if (node == null) return null;
-        if (node.name == 'text') return node;
-        if (node.children) return getBeforeTextNode(node.children[node.children.length - 1]);
-        return null;
-    }
-
-    function getAfterTextNode(node: Node): Node {
-        if (node == null) return null;
-        if (node.name == 'text') return node;
-        if (node.children) return getBeforeTextNode(node.children[0]);
-        return null;
-    }
-
-    function isBlockNode(node: Node): boolean {
-        return ['blockCode', 'center', 'quote', 'title'].includes(node.name);
-    }
-
-    /**
-     * Removes the line breaks before and after block elements
-     * (a block element acts as a line break by itself, so extra blank lines would otherwise appear)
-     * @param nodes
-     */
-    const removeNeedlessLineBreaks = (nodes: Node[]) => {
-        nodes.forEach((node, i) => {
-            if (node.children) removeNeedlessLineBreaks(node.children);
-            if (isBlockNode(node)) {
-                const before = getBeforeTextNode(nodes[i - 1]);
-                const after = getAfterTextNode(nodes[i + 1]);
-                if (before && before.props.text.endsWith('\n')) {
-                    before.props.text = before.props.text.substring(0, before.props.text.length - 1);
-                }
-                if (after && after.props.text.startsWith('\n')) {
-                    after.props.text = after.props.text.substring(1);
-                }
-            }
-        });
-    };
-
     const removeEmptyTextNodes = (nodes: Node[]) => {
         nodes.forEach(n => {
             if (n.children) {
@@ -74,8 +35,6 @@ export default (source: string): Node[] => {
         return nodes.filter(n => !(n.name == 'text' && n.props.text == ''));
     };
 
-    removeNeedlessLineBreaks(nodes);
-
     nodes = removeEmptyTextNodes(nodes);
 
     return nodes;
@@ -254,7 +254,7 @@ const mfm = P.createLanguage({
         const qInner = quote.join('\n').replace(/^>/gm, '').replace(/^ /gm, '');
         if (qInner == '') return P.makeFailure(i, 'not a quote');
         const contents = r.root.tryParse(qInner);
-        return P.makeSuccess(i + quote.join('\n').length, makeNodeWithChildren('quote', contents));
+        return P.makeSuccess(i + quote.join('\n').length + 1, makeNodeWithChildren('quote', contents));
     })),
     //#endregion
 
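The visible effect of the `+ 1` above: the quote parser now advances past the newline that terminates the quote, so the cleanup previously done by the deleted removeNeedlessLineBreaks pass happens at parse time. A minimal sketch of the difference, assuming the analyze entry point exercised in the tests below (the import path is a guess, not part of the commit):

    // Sketch only — the module path is an assumption.
    import analyze from '../src/mfm/parse';

    // '> foo\nbar': the matched quote text '> foo' is 5 chars long, and
    // makeSuccess now resumes at index 5 + 1, past the trailing '\n',
    // so the next text node starts directly at 'bar':
    //   [ nodeWithChildren('quote', [ text('foo') ]), text('bar') ]
    // Before this commit the parser resumed at index 5 and left the
    // stray '\n' for removeNeedlessLineBreaks to strip afterwards.
    const tokens = analyze('> foo\nbar');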
test/mfm.ts (29 changed lines)
@@ -299,6 +299,7 @@ describe('Text', () => {
             nodeWithChildren('quote', [
                 text('foo')
             ]),
+            text('\n'),
             nodeWithChildren('quote', [
                 text('bar')
             ]),
@@ -358,7 +359,7 @@ describe('Text', () => {
     it('with before and after texts', () => {
         const tokens = analyze('before\n> foo\nafter');
         assert.deepEqual([
-            text('before'),
+            text('before\n'),
             nodeWithChildren('quote', [
                 text('foo')
             ]),
@@ -366,6 +367,24 @@ describe('Text', () => {
         ], tokens);
     });
 
+    it('multiple quotes', () => {
+        const tokens = analyze('> foo\nbar\n\n> foo\nbar\n\n> foo\nbar');
+        assert.deepEqual([
+            nodeWithChildren('quote', [
+                text('foo')
+            ]),
+            text('bar\n\n'),
+            nodeWithChildren('quote', [
+                text('foo')
+            ]),
+            text('bar\n\n'),
+            nodeWithChildren('quote', [
+                text('foo')
+            ]),
+            text('bar'),
+        ], tokens);
+    });
+
     it('require line break before ">"', () => {
         const tokens = analyze('foo>bar');
         assert.deepEqual([
@@ -388,11 +407,11 @@ describe('Text', () => {
     it('trim line breaks', () => {
         const tokens = analyze('foo\n\n>a\n>>b\n>>\n>>>\n>>>c\n>>>\n>d\n\n');
         assert.deepEqual([
-            text('foo\n'),
+            text('foo\n\n'),
             nodeWithChildren('quote', [
-                text('a'),
+                text('a\n'),
                 nodeWithChildren('quote', [
-                    text('b\n'),
+                    text('b\n\n'),
                     nodeWithChildren('quote', [
                         text('\nc\n')
                     ])
@@ -664,7 +683,7 @@ describe('Text', () => {
     it('with before and after texts', () => {
         const tokens = analyze('before\n【foo】\nafter');
         assert.deepEqual([
-            text('before'),
+            text('before\n'),
             nodeWithChildren('title', [
                 text('foo')
             ]),
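Read together, the updated expectations encode one rule for every block element (quote and title alike): the '\n' before a block now stays in the preceding text node, while the parser consumes the one after it, instead of both being trimmed by a post-pass. A small check in the style of this test file; note the trailing text('after') element lies outside the hunk above and is inferred from the test name, so treat it as an assumption:

    const tokens = analyze('before\n【foo】\nafter');
    assert.deepEqual([
        text('before\n'),            // the '\n' before the block is kept now
        nodeWithChildren('title', [
            text('foo')
        ]),
        text('after'),               // assumed — cut off in the hunk above
    ], tokens);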