From bc400ac789c76e915df82b0998d3fd12c89da2e8 Mon Sep 17 00:00:00 2001
From: Trevor Buckner
Date: Sun, 15 Aug 2021 23:02:39 -0400
Subject: [PATCH] fix: Refactor table tokens (#2166)

BREAKING CHANGE:

- The `table` token's `header` property now contains an array of objects, one per header cell, each with `text` and `tokens` properties.
- The `table` token's `cells` property is renamed to `rows`: an array of rows, where each row is an array of objects, one per cell, each with `text` and `tokens` properties.

v2:

```json
{
  "type": "table",
  "align": [null, null],
  "raw": "| a | b |\n|---|---|\n| 1 | 2 |\n",
  "header": ["a", "b"],
  "cells": [["1", "2"]],
  "tokens": {
    "header": [
      [{ "type": "text", "raw": "a", "text": "a" }],
      [{ "type": "text", "raw": "b", "text": "b" }]
    ],
    "cells": [[
      [{ "type": "text", "raw": "1", "text": "1" }],
      [{ "type": "text", "raw": "2", "text": "2" }]
    ]]
  }
}
```

v3:

```json
{
  "type": "table",
  "align": [null, null],
  "raw": "| a | b |\n|---|---|\n| 1 | 2 |\n",
  "header": [
    { "text": "a", "tokens": [{ "type": "text", "raw": "a", "text": "a" }] },
    { "text": "b", "tokens": [{ "type": "text", "raw": "b", "text": "b" }] }
  ],
  "rows": [
    [
      { "text": "1", "tokens": [{ "type": "text", "raw": "1", "text": "1" }] },
      { "text": "2", "tokens": [{ "type": "text", "raw": "2", "text": "2" }] }
    ]
  ]
}
```
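For code that consumes table tokens directly, migration is a matter of reading each cell object in place of the old parallel `tokens` structure. A minimal before/after sketch, assuming `token` is a `table` token produced by the lexer:

```js
// v2: child tokens lived in a parallel structure on the token:
//   token.tokens.header[j]   -> inline tokens of header cell j
//   token.tokens.cells[i][j] -> inline tokens of body cell (i, j)

// v3: each cell object carries its own `text` and `tokens`.
for (const cell of token.header) {
  console.log(cell.text, cell.tokens); // header cell
}
for (const row of token.rows) {
  for (const cell of row) {
    console.log(cell.text, cell.tokens); // body cell
  }
}
```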
---
 src/Parser.js            |   8 +--
 src/Tokenizer.js         |  25 ++++-----
 src/marked.js            |   8 +--
 test/unit/Lexer-spec.js  | 112 ++++++++++++++++++++++++++-------------
 test/unit/Parser-spec.js |  34 +++++++-----
 5 files changed, 113 insertions(+), 74 deletions(-)

diff --git a/src/Parser.js b/src/Parser.js
index ef5009aea8..a7519f9134 100644
--- a/src/Parser.js
+++ b/src/Parser.js
@@ -103,22 +103,22 @@ module.exports = class Parser {
           l2 = token.header.length;
           for (j = 0; j < l2; j++) {
             cell += this.renderer.tablecell(
-              this.parseInline(token.tokens.header[j]),
+              this.parseInline(token.header[j].tokens),
               { header: true, align: token.align[j] }
             );
           }
           header += this.renderer.tablerow(cell);
 
           body = '';
-          l2 = token.cells.length;
+          l2 = token.rows.length;
           for (j = 0; j < l2; j++) {
-            row = token.tokens.cells[j];
+            row = token.rows[j];
 
             cell = '';
             l3 = row.length;
             for (k = 0; k < l3; k++) {
               cell += this.renderer.tablecell(
-                this.parseInline(row[k]),
+                this.parseInline(row[k].tokens),
                 { header: false, align: token.align[k] }
               );
             }
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
index 85bdb33675..eed2a9b994 100644
--- a/src/Tokenizer.js
+++ b/src/Tokenizer.js
@@ -352,9 +352,9 @@ module.exports = class Tokenizer {
     if (cap) {
       const item = {
         type: 'table',
-        header: splitCells(cap[1].replace(/^ *| *\| *$/g, '')),
+        header: splitCells(cap[1]).map(c => { return { text: c }; }),
         align: cap[2].replace(/^ *|\| *$/g, '').split(/ *\| */),
-        cells: cap[3] ? cap[3].replace(/\n$/, '').split('\n') : []
+        rows: cap[3] ? cap[3].replace(/\n$/, '').split('\n') : []
       };
 
       if (item.header.length === item.align.length) {
@@ -374,32 +374,27 @@ module.exports = class Tokenizer {
           }
         }
 
-        l = item.cells.length;
+        l = item.rows.length;
         for (i = 0; i < l; i++) {
-          item.cells[i] = splitCells(item.cells[i], item.header.length);
+          item.rows[i] = splitCells(item.rows[i], item.header.length).map(c => { return { text: c }; });
         }
 
         // parse child tokens inside headers and cells
-        item.tokens = {
-          header: [],
-          cells: []
-        };
 
         // header child tokens
         l = item.header.length;
         for (j = 0; j < l; j++) {
-          item.tokens.header[j] = [];
-          this.lexer.inlineTokens(item.header[j], item.tokens.header[j]);
+          item.header[j].tokens = [];
+          this.lexer.inlineTokens(item.header[j].text, item.header[j].tokens);
         }
 
         // cell child tokens
-        l = item.cells.length;
+        l = item.rows.length;
         for (j = 0; j < l; j++) {
-          row = item.cells[j];
-          item.tokens.cells[j] = [];
+          row = item.rows[j];
           for (k = 0; k < row.length; k++) {
-            item.tokens.cells[j][k] = [];
-            this.lexer.inlineTokens(row[k], item.tokens.cells[j][k]);
+            row[k].tokens = [];
+            this.lexer.inlineTokens(row[k].text, row[k].tokens);
           }
         }
diff --git a/src/marked.js b/src/marked.js
index e36907a895..e91be9e10e 100644
--- a/src/marked.js
+++ b/src/marked.js
@@ -260,12 +260,12 @@ marked.walkTokens = function(tokens, callback) {
     callback(token);
     switch (token.type) {
       case 'table': {
-        for (const cell of token.tokens.header) {
-          marked.walkTokens(cell, callback);
+        for (const cell of token.header) {
+          marked.walkTokens(cell.tokens, callback);
         }
-        for (const row of token.tokens.cells) {
+        for (const row of token.rows) {
           for (const cell of row) {
-            marked.walkTokens(cell, callback);
+            marked.walkTokens(cell.tokens, callback);
           }
         }
         break;
       }
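With the flattened shape, `walkTokens` reaches the child tokens of every header and body cell through the cell objects themselves, as the hunk above shows. A small usage sketch (the input string and the collecting callback are illustrative, not part of this patch):

```js
const marked = require('marked');

// Collect the text of every inline token the walker visits,
// including the tokens inside table header and body cells.
const seen = [];
marked.use({
  walkTokens(token) {
    if (token.type === 'text') seen.push(token.text);
  }
});

marked.parse('| a | b |\n|---|---|\n| 1 | 2 |\n');
console.log(seen); // ['a', 'b', '1', '2']
```

The traversal order is unchanged: the table token itself is visited first, then the header cells, then the body cells row by row.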
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
index af3f5781b1..09c951e402 100644
--- a/test/unit/Lexer-spec.js
+++ b/test/unit/Lexer-spec.js
@@ -176,20 +176,30 @@ lheading 2
 `,
         tokens: [{
           type: 'table',
-          header: ['a', 'b'],
           align: [null, null],
-          cells: [['1', '2']],
           raw: '| a | b |\n|---|---|\n| 1 | 2 |\n',
-          tokens: {
-            header: [
-              [{ type: 'text', raw: 'a', text: 'a' }],
-              [{ type: 'text', raw: 'b', text: 'b' }]
-            ],
-            cells: [[
-              [{ type: 'text', raw: '1', text: '1' }],
-              [{ type: 'text', raw: '2', text: '2' }]
-            ]]
-          }
+          header: [
+            {
+              text: 'a',
+              tokens: [{ type: 'text', raw: 'a', text: 'a' }]
+            },
+            {
+              text: 'b',
+              tokens: [{ type: 'text', raw: 'b', text: 'b' }]
+            }
+          ],
+          rows: [
+            [
+              {
+                text: '1',
+                tokens: [{ type: 'text', raw: '1', text: '1' }]
+              },
+              {
+                text: '2',
+                tokens: [{ type: 'text', raw: '2', text: '2' }]
+              }
+            ]
+          ]
         }]
       });
     });
@@ -203,22 +213,38 @@ lheading 2
 `,
         tokens: [{
           type: 'table',
-          header: ['a', 'b', 'c'],
           align: ['left', 'center', 'right'],
-          cells: [['1', '2', '3']],
           raw: '| a | b | c |\n|:--|:-:|--:|\n| 1 | 2 | 3 |\n',
-          tokens: {
-            header: [
-              [{ type: 'text', raw: 'a', text: 'a' }],
-              [{ type: 'text', raw: 'b', text: 'b' }],
-              [{ type: 'text', raw: 'c', text: 'c' }]
-            ],
-            cells: [[
-              [{ type: 'text', raw: '1', text: '1' }],
-              [{ type: 'text', raw: '2', text: '2' }],
-              [{ type: 'text', raw: '3', text: '3' }]
-            ]]
-          }
+          header: [
+            {
+              text: 'a',
+              tokens: [{ type: 'text', raw: 'a', text: 'a' }]
+            },
+            {
+              text: 'b',
+              tokens: [{ type: 'text', raw: 'b', text: 'b' }]
+            },
+            {
+              text: 'c',
+              tokens: [{ type: 'text', raw: 'c', text: 'c' }]
+            }
+          ],
+          rows: [
+            [
+              {
+                text: '1',
+                tokens: [{ type: 'text', raw: '1', text: '1' }]
+              },
+              {
+                text: '2',
+                tokens: [{ type: 'text', raw: '2', text: '2' }]
+              },
+              {
+                text: '3',
+                tokens: [{ type: 'text', raw: '3', text: '3' }]
+              }
+            ]
+          ]
         }]
       });
     });
@@ -232,20 +258,30 @@ a | b
 `,
         tokens: [{
           type: 'table',
-          header: ['a', 'b'],
           align: [null, null],
-          cells: [['1', '2']],
           raw: 'a | b\n--|--\n1 | 2\n',
-          tokens: {
-            header: [
-              [{ type: 'text', raw: 'a', text: 'a' }],
-              [{ type: 'text', raw: 'b', text: 'b' }]
-            ],
-            cells: [[
-              [{ type: 'text', raw: '1', text: '1' }],
-              [{ type: 'text', raw: '2', text: '2' }]
-            ]]
-          }
+          header: [
+            {
+              text: 'a',
+              tokens: [{ type: 'text', raw: 'a', text: 'a' }]
+            },
+            {
+              text: 'b',
+              tokens: [{ type: 'text', raw: 'b', text: 'b' }]
+            }
+          ],
+          rows: [
+            [
+              {
+                text: '1',
+                tokens: [{ type: 'text', raw: '1', text: '1' }]
+              },
+              {
+                text: '2',
+                tokens: [{ type: 'text', raw: '2', text: '2' }]
+              }
+            ]
+          ]
         }]
       });
     });
diff --git a/test/unit/Parser-spec.js b/test/unit/Parser-spec.js
index 10efd2d0db..d432514b5d 100644
--- a/test/unit/Parser-spec.js
+++ b/test/unit/Parser-spec.js
@@ -68,21 +68,29 @@ describe('Parser', () => {
       await expectHtml({
         tokens: [{
           type: 'table',
-          header: ['a', 'b'],
           align: ['left', 'right'],
-          cells: [['1', '2']],
-          tokens: {
-            header: [
-              [{ type: 'text', text: 'a' }],
-              [{ type: 'text', text: 'b' }]
-            ],
-            cells: [
-              [
-                [{ type: 'text', text: '1' }],
-                [{ type: 'text', text: '2' }]
-              ]
-            ]
-          }
+          header: [
+            {
+              text: 'a',
+              tokens: [{ type: 'text', raw: 'a', text: 'a' }]
+            },
+            {
+              text: 'b',
+              tokens: [{ type: 'text', raw: 'b', text: 'b' }]
+            }
+          ],
+          rows: [
+            [
+              {
+                text: '1',
+                tokens: [{ type: 'text', raw: '1', text: '1' }]
+              },
+              {
+                text: '2',
+                tokens: [{ type: 'text', raw: '2', text: '2' }]
+              }
+            ]
+          ]
         }],
         html: `
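Note that the renderer surface is untouched by this refactor: `Parser` still passes `tablecell` the already-parsed inline content plus `{ header, align }` flags, so existing renderer overrides keep working. An illustrative override mirroring the default output, shown only to demonstrate the unchanged signature:

```js
const marked = require('marked');

// `tablecell` still receives parsed inline HTML and flags;
// only the token shape feeding the parser changed.
marked.use({
  renderer: {
    tablecell(content, flags) {
      const tag = flags.header ? 'th' : 'td';
      return flags.align
        ? `<${tag} align="${flags.align}">${content}</${tag}>\n`
        : `<${tag}>${content}</${tag}>\n`;
    }
  }
});
```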