upgrade marked.js from v1.1.0 to v3.0.4
parent c5fbbbbb5c
commit da904d6be8
@@ -194,36 +194,40 @@ function ev(e) {
 
 
 // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
-if (!String.prototype.endsWith) {
+if (!String.prototype.endsWith)
     String.prototype.endsWith = function (search, this_len) {
         if (this_len === undefined || this_len > this.length) {
             this_len = this.length;
         }
         return this.substring(this_len - search.length, this_len) === search;
     };
-}
+
-if (!String.startsWith) {
+if (!String.startsWith)
     String.prototype.startsWith = function (s, i) {
         i = i > 0 ? i | 0 : 0;
         return this.substring(i, i + s.length) === s;
     };
-}
+
-if (!Element.prototype.matches) {
+if (!String.trimEnd)
+    String.prototype.trimEnd = String.prototype.trimRight = function () {
+        return this.replace(/[ \t\r\n]+$/m, '');
+    };
+
+if (!Element.prototype.matches)
     Element.prototype.matches =
         Element.prototype.oMatchesSelector ||
         Element.prototype.msMatchesSelector ||
         Element.prototype.mozMatchesSelector ||
         Element.prototype.webkitMatchesSelector;
-}
+
-if (!Element.prototype.closest) {
+if (!Element.prototype.closest)
     Element.prototype.closest = function (s) {
         var el = this;
         do {
             if (el.matches(s)) return el;
             el = el.parentElement || el.parentNode;
         } while (el !== null && el.nodeType === 1);
-    }
-}
+    };
 
 
 // https://stackoverflow.com/a/950146
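Note: the hunk above converts the single-statement polyfill guards to braceless form and adds a String.prototype.trimEnd / trimRight polyfill. A quick illustration (not part of the commit) of the polyfill's behaviour, which matches the native method for ordinary strings:

// illustration only -- behaviour of the trimEnd polyfill added above
var s = 'note  \t\r\n';
console.log(s.trimEnd());      // "note"
console.log('a b'.trimEnd());  // "a b" (only trailing whitespace is removed)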
@@ -2,10 +2,10 @@ FROM alpine:3.14
 WORKDIR /z
 ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
     ver_hashwasm=4.9.0 \
-    ver_marked=1.1.0 \
+    ver_marked=3.0.4 \
     ver_ogvjs=1.8.4 \
-    ver_mde=2.14.0 \
+    ver_mde=2.15.0 \
-    ver_codemirror=5.59.3 \
+    ver_codemirror=5.62.3 \
     ver_fontawesome=5.13.0 \
     ver_zopfli=1.0.3
 
@@ -113,7 +113,7 @@ RUN cd CodeMirror-$ver_codemirror \
 COPY easymde.patch /z/
 RUN cd easy-markdown-editor-$ver_mde \
     && patch -p1 < /z/easymde.patch \
-    && sed -ri 's`https://registry.npmjs.org/marked/-/marked-0.8.2.tgz`file:/z/nodepkgs/marked`' package-lock.json \
+    && sed -ri 's`https://registry.npmjs.org/marked/-/marked-[0-9\.]+.tgz`file:/z/nodepkgs/marked`' package-lock.json \
     && sed -ri 's`("marked": ")[^"]+`\1file:/z/nodepkgs/marked`' ./package.json \
     && sed -ri 's`("codemirror": ")[^"]+`\1file:/z/nodepkgs/codemirror`' ./package.json \
     && npm install
@@ -1,15 +1,15 @@
 diff --git a/src/Lexer.js b/src/Lexer.js
-adds linetracking to marked.js v1.0.0 +git;
+adds linetracking to marked.js v3.0.4;
 add data-ln="%d" to most tags, %d is the source markdown line
 --- a/src/Lexer.js
 +++ b/src/Lexer.js
-@@ -49,4 +49,5 @@ function mangle(text) {
+@@ -50,4 +50,5 @@ function mangle(text) {
 module.exports = class Lexer {
 constructor(options) {
 + this.ln = 1; // like most editors, start couting from 1
 this.tokens = [];
 this.tokens.links = Object.create(null);
-@@ -108,4 +109,15 @@ module.exports = class Lexer {
+@@ -127,4 +128,15 @@ module.exports = class Lexer {
 }
 
 + set_ln(token, ln = this.ln) {
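Note: per the patch description above, the line-tracking patch makes marked stamp most opening tags with a data-ln="%d" attribute holding the source markdown line. A hedged illustration (not part of the commit; exact output may differ slightly) of what the rendered output looks like and how a viewer could map a DOM node back to its source line:

// illustration only, assuming the patched renderer:
// marked('hi\n\nworld') renders roughly
//   <p data-ln="1">hi</p>
//   <p data-ln="3">world</p>
function sourceLineOf(el) {
    var t = el.closest('[data-ln]');  // nearest tagged ancestor (see the closest() polyfill above)
    return t ? parseInt(t.getAttribute('data-ln'), 10) : null;
}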
@@ -25,122 +25,123 @@ add data-ln="%d" to most tags, %d is the source markdown line
 +
 /**
 * Lexing
-@@ -113,10 +125,15 @@ module.exports = class Lexer {
+@@ -134,7 +146,11 @@ module.exports = class Lexer {
-blockTokens(src, tokens = [], top = true) {
+src = src.replace(/^ +$/gm, '');
-src = src.replace(/^ +$/gm, '');
+}
-- let token, i, l, lastToken;
+- let token, lastToken, cutSrc, lastParagraphClipped;
-+ let token, i, l, lastToken, ln;
++ let token, lastToken, cutSrc, lastParagraphClipped, ln;
 
 while (src) {
 + // this.ln will be bumped by recursive calls into this func;
 + // reset the count and rely on the outermost token's raw only
 + ln = this.ln;
 +
-// newline
+if (this.options.extensions
+&& this.options.extensions.block
+@@ -142,4 +158,5 @@ module.exports = class Lexer {
+if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
+src = src.substring(token.raw.length);
++ this.set_ln(token, ln);
+tokens.push(token);
+return true;
+@@ -153,4 +170,5 @@ module.exports = class Lexer {
 if (token = this.tokenizer.space(src)) {
 src = src.substring(token.raw.length);
-+ this.set_ln(token); // is \n if not type
++ this.set_ln(token, ln); // is \n if not type
 if (token.type) {
 tokens.push(token);
-@@ -128,4 +145,5 @@ module.exports = class Lexer {
+@@ -162,4 +180,5 @@ module.exports = class Lexer {
-if (token = this.tokenizer.code(src, tokens)) {
+if (token = this.tokenizer.code(src)) {
 src = src.substring(token.raw.length);
-+ this.set_ln(token);
++ this.set_ln(token, ln);
-if (token.type) {
+lastToken = tokens[tokens.length - 1];
-tokens.push(token);
+// An indented code block cannot interrupt a paragraph.
-@@ -141,4 +159,5 @@ module.exports = class Lexer {
+@@ -177,4 +196,5 @@ module.exports = class Lexer {
 if (token = this.tokenizer.fences(src)) {
 src = src.substring(token.raw.length);
-+ this.set_ln(token);
++ this.set_ln(token, ln);
 tokens.push(token);
 continue;
-@@ -148,4 +167,5 @@ module.exports = class Lexer {
+@@ -184,4 +204,5 @@ module.exports = class Lexer {
 if (token = this.tokenizer.heading(src)) {
 src = src.substring(token.raw.length);
-+ this.set_ln(token);
++ this.set_ln(token, ln);
 tokens.push(token);
 continue;
-@@ -155,4 +175,5 @@ module.exports = class Lexer {
+@@ -191,4 +212,5 @@ module.exports = class Lexer {
-if (token = this.tokenizer.nptable(src)) {
-src = src.substring(token.raw.length);
-+ this.set_ln(token);
-tokens.push(token);
-continue;
-@@ -162,4 +183,5 @@ module.exports = class Lexer {
 if (token = this.tokenizer.hr(src)) {
 src = src.substring(token.raw.length);
-+ this.set_ln(token);
++ this.set_ln(token, ln);
 tokens.push(token);
 continue;
-@@ -170,4 +192,7 @@ module.exports = class Lexer {
+@@ -198,4 +220,5 @@ module.exports = class Lexer {
+if (token = this.tokenizer.blockquote(src)) {
 src = src.substring(token.raw.length);
-token.tokens = this.blockTokens(token.text, [], top);
-+ // recursive call to blockTokens probably bumped this.ln,
-+ // token.raw is more reliable so reset this.ln and use that
 + this.set_ln(token, ln);
 tokens.push(token);
 continue;
-@@ -180,5 +205,9 @@ module.exports = class Lexer {
+@@ -205,4 +228,5 @@ module.exports = class Lexer {
-for (i = 0; i < l; i++) {
+if (token = this.tokenizer.list(src)) {
-token.items[i].tokens = this.blockTokens(token.items[i].text, [], false);
+src = src.substring(token.raw.length);
-+ // list entries don't bump the linecounter, so let's
-+ this.ln++;
-}
-+ // then reset like blockquote
 + this.set_ln(token, ln);
 tokens.push(token);
 continue;
-@@ -188,4 +217,5 @@ module.exports = class Lexer {
+@@ -212,4 +236,5 @@ module.exports = class Lexer {
 if (token = this.tokenizer.html(src)) {
 src = src.substring(token.raw.length);
-+ this.set_ln(token);
++ this.set_ln(token, ln);
 tokens.push(token);
 continue;
-@@ -195,4 +225,5 @@ module.exports = class Lexer {
+@@ -219,4 +244,5 @@ module.exports = class Lexer {
-if (top && (token = this.tokenizer.def(src))) {
+if (token = this.tokenizer.def(src)) {
 src = src.substring(token.raw.length);
-+ this.set_ln(token);
++ this.set_ln(token, ln);
-if (!this.tokens.links[token.tag]) {
+lastToken = tokens[tokens.length - 1];
-this.tokens.links[token.tag] = {
+if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
-@@ -207,4 +238,5 @@ module.exports = class Lexer {
+@@ -236,4 +262,5 @@ module.exports = class Lexer {
 if (token = this.tokenizer.table(src)) {
 src = src.substring(token.raw.length);
-+ this.set_ln(token);
++ this.set_ln(token, ln);
 tokens.push(token);
 continue;
-@@ -214,4 +246,5 @@ module.exports = class Lexer {
+@@ -243,4 +270,5 @@ module.exports = class Lexer {
 if (token = this.tokenizer.lheading(src)) {
 src = src.substring(token.raw.length);
-+ this.set_ln(token);
++ this.set_ln(token, ln);
 tokens.push(token);
 continue;
-@@ -221,4 +254,5 @@ module.exports = class Lexer {
+@@ -263,4 +291,5 @@ module.exports = class Lexer {
-if (top && (token = this.tokenizer.paragraph(src))) {
+}
+if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
++ this.set_ln(token, ln);
+lastToken = tokens[tokens.length - 1];
+if (lastParagraphClipped && lastToken.type === 'paragraph') {
+@@ -280,4 +309,6 @@ module.exports = class Lexer {
+if (token = this.tokenizer.text(src)) {
 src = src.substring(token.raw.length);
-+ this.set_ln(token);
++ this.set_ln(token, ln);
-tokens.push(token);
++ this.ln++;
-continue;
+lastToken = tokens[tokens.length - 1];
-@@ -228,4 +262,5 @@ module.exports = class Lexer {
+if (lastToken && lastToken.type === 'text') {
-if (token = this.tokenizer.text(src, tokens)) {
+@@ -355,4 +386,5 @@ module.exports = class Lexer {
-src = src.substring(token.raw.length);
+if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
-+ this.set_ln(token);
+src = src.substring(token.raw.length);
-if (token.type) {
++ this.ln = token.ln || this.ln;
 tokens.push(token);
-@@ -263,4 +298,7 @@ module.exports = class Lexer {
+return true;
-for (i = 0; i < l; i++) {
+@@ -420,4 +452,6 @@ module.exports = class Lexer {
-token = tokens[i];
-+ // this.ln is at EOF when inline() is invoked;
-+ // all this affects <br> tags only so no biggie if it breaks
-+ this.ln = token.ln || this.ln;
-switch (token.type) {
-case 'paragraph':
-@@ -386,4 +424,6 @@ module.exports = class Lexer {
 if (token = this.tokenizer.br(src)) {
 src = src.substring(token.raw.length);
 + // no need to reset (no more blockTokens anyways)
 + token.ln = this.ln++;
 tokens.push(token);
 continue;
+@@ -462,4 +496,5 @@ module.exports = class Lexer {
+if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
+src = src.substring(token.raw.length);
++ this.ln = token.ln || this.ln;
+if (token.raw.slice(-1) !== '_') { // Track prevChar before string of ____ started
+prevChar = token.raw.slice(-1);
 diff --git a/src/Parser.js b/src/Parser.js
 --- a/src/Parser.js
 +++ b/src/Parser.js
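Note: the Lexer hunk above registers a set_ln(token, ln) helper and calls it after every block tokenizer, but the helper's body is outside the visible hunks. A minimal sketch of the idea, assuming it stamps the token with the line it started on and advances the counter by the newlines consumed in token.raw (names and details here are assumptions, not the actual patch code):

// hypothetical sketch -- the real set_ln body is not shown in this excerpt
function set_ln(lexer, token, ln) {
    if (ln === undefined) ln = lexer.ln;
    token.ln = ln;                                      // line the token started on
    lexer.ln = ln + token.raw.split('\n').length - 1;   // advance past the lines it consumed
}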
@@ -150,17 +151,16 @@ diff --git a/src/Parser.js b/src/Parser.js
 + this.ln = 0; // error indicator; should always be set >=1 from tokens
 }
 
-@@ -55,4 +56,9 @@ module.exports = class Parser {
+@@ -64,4 +65,8 @@ module.exports = class Parser {
 for (i = 0; i < l; i++) {
 token = tokens[i];
 + // take line-numbers from tokens whenever possible
 + // and update the renderer's html attribute with the new value
 + this.ln = token.ln || this.ln;
 + this.renderer.tag_ln(this.ln);
-+
+
-switch (token.type) {
+// Run any renderer extensions
-case 'space': {
+@@ -124,7 +129,10 @@ module.exports = class Parser {
-@@ -105,7 +111,10 @@ module.exports = class Parser {
 }
 
 - body += this.renderer.tablerow(cell);
@@ -173,7 +173,7 @@ diff --git a/src/Parser.js b/src/Parser.js
 + out += this.renderer.tag_ln(token.ln).table(header, body);
 continue;
 }
-@@ -148,8 +157,12 @@ module.exports = class Parser {
+@@ -167,8 +175,12 @@ module.exports = class Parser {
 
 itemBody += this.parse(item.tokens, loose);
 - body += this.renderer.listitem(itemBody, task, checked);
@@ -188,7 +188,7 @@ diff --git a/src/Parser.js b/src/Parser.js
 + out += this.renderer.tag_ln(token.ln).list(body, ordered, start);
 continue;
 }
-@@ -160,5 +173,6 @@ module.exports = class Parser {
+@@ -179,5 +191,6 @@ module.exports = class Parser {
 }
 case 'paragraph': {
 - out += this.renderer.paragraph(this.parseInline(token.tokens));
@@ -196,22 +196,14 @@ diff --git a/src/Parser.js b/src/Parser.js
 + out += this.renderer.tag_ln(token.ln).paragraph(t);
 continue;
 }
-@@ -199,4 +213,6 @@ module.exports = class Parser {
+@@ -221,4 +234,7 @@ module.exports = class Parser {
-for (i = 0; i < l; i++) {
 token = tokens[i];
 
 + // another thing that only affects <br/> and other inlines
 + this.ln = token.ln || this.ln;
-switch (token.type) {
++
-case 'escape': {
+// Run any renderer extensions
-@@ -229,5 +245,7 @@ module.exports = class Parser {
+if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
-}
-case 'br': {
-- out += renderer.br();
-+ // update the html attribute before writing each <br/>,
-+ // don't care about the others
-+ out += renderer.tag_ln(this.ln).br();
-break;
-}
 diff --git a/src/Renderer.js b/src/Renderer.js
 --- a/src/Renderer.js
 +++ b/src/Renderer.js
@@ -228,7 +220,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
 +
 code(code, infostring, escaped) {
 const lang = (infostring || '').match(/\S*/)[0];
-@@ -24,10 +30,10 @@ module.exports = class Renderer {
+@@ -26,10 +32,10 @@ module.exports = class Renderer {
 
 if (!lang) {
 - return '<pre><code>'
@@ -241,58 +233,69 @@ diff --git a/src/Renderer.js b/src/Renderer.js
 + return '<pre' + this.ln + '><code class="'
 + this.options.langPrefix
 + escape(lang, true)
-@@ -38,5 +44,5 @@ module.exports = class Renderer {
+@@ -40,5 +46,5 @@ module.exports = class Renderer {
 
 blockquote(quote) {
 - return '<blockquote>\n' + quote + '</blockquote>\n';
 + return '<blockquote' + this.ln + '>\n' + quote + '</blockquote>\n';
 }
 
-@@ -49,4 +55,5 @@ module.exports = class Renderer {
+@@ -51,4 +57,5 @@ module.exports = class Renderer {
 return '<h'
 + level
 + + this.ln
 + ' id="'
 + this.options.headerPrefix
-@@ -59,5 +66,5 @@ module.exports = class Renderer {
+@@ -61,5 +68,5 @@ module.exports = class Renderer {
 }
 // ignore IDs
 - return '<h' + level + '>' + text + '</h' + level + '>\n';
 + return '<h' + level + this.ln + '>' + text + '</h' + level + '>\n';
 }
 
-@@ -73,5 +80,5 @@ module.exports = class Renderer {
+@@ -75,5 +82,5 @@ module.exports = class Renderer {
 
 listitem(text) {
 - return '<li>' + text + '</li>\n';
 + return '<li' + this.ln + '>' + text + '</li>\n';
 }
 
-@@ -85,5 +92,5 @@ module.exports = class Renderer {
+@@ -87,5 +94,5 @@ module.exports = class Renderer {
 
 paragraph(text) {
 - return '<p>' + text + '</p>\n';
 + return '<p' + this.ln + '>' + text + '</p>\n';
 }
 
-@@ -100,5 +107,5 @@ module.exports = class Renderer {
+@@ -102,5 +109,5 @@ module.exports = class Renderer {
 
 tablerow(content) {
 - return '<tr>\n' + content + '</tr>\n';
 + return '<tr' + this.ln + '>\n' + content + '</tr>\n';
 }
 
-@@ -125,5 +132,5 @@ module.exports = class Renderer {
+@@ -127,5 +134,5 @@ module.exports = class Renderer {
 
 br() {
 - return this.options.xhtml ? '<br/>' : '<br>';
 + return this.options.xhtml ? '<br' + this.ln + '/>' : '<br' + this.ln + '>';
 }
 
-@@ -151,5 +158,5 @@ module.exports = class Renderer {
+@@ -153,5 +160,5 @@ module.exports = class Renderer {
 }
 
 - let out = '<img src="' + href + '" alt="' + text + '"';
 + let out = '<img' + this.ln + ' src="' + href + '" alt="' + text + '"';
 if (title) {
 out += ' title="' + title + '"';
+diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+--- a/src/Tokenizer.js
++++ b/src/Tokenizer.js
+@@ -301,4 +301,7 @@ module.exports = class Tokenizer {
+const l = list.items.length;
+
++ // each nested list gets +1 ahead; this hack makes every listgroup -1 but atleast it doesn't get infinitely bad
++ this.lexer.ln--;
++
+// Item child tokens handled here at end because we needed to have the final item to trim it first
+for (i = 0; i < l; i++) {
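Note: the Parser hunks above call this.renderer.tag_ln(token.ln) before emitting a tag, and the Renderer concatenates this.ln straight into the markup ('<p' + this.ln + '>'), which implies tag_ln caches a ready-made attribute string and returns the renderer so calls can be chained. A hedged sketch of that helper (its actual definition is not visible in this excerpt):

// hypothetical sketch of the tag_ln helper implied by the hunks above
class Renderer {
    constructor() { this.ln = ''; }
    tag_ln(n) {
        this.ln = ' data-ln="' + n + '"';  // folded into the next opening tag
        return this;                       // enables renderer.tag_ln(n).paragraph(text)
    }
    paragraph(text) {
        return '<p' + this.ln + '>' + text + '</p>\n';  // as in the patched Renderer.js
    }
}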
@@ -1,52 +1,52 @@
 diff --git a/src/Lexer.js b/src/Lexer.js
 --- a/src/Lexer.js
 +++ b/src/Lexer.js
-@@ -5,5 +5,5 @@ const { block, inline } = require('./rules.js');
+@@ -6,5 +6,5 @@ const { repeatString } = require('./helpers.js');
 /**
 * smartypants text replacement
 - */
 + *
 function smartypants(text) {
 return text
-@@ -26,5 +26,5 @@ function smartypants(text) {
+@@ -27,5 +27,5 @@ function smartypants(text) {
 /**
 * mangle email addresses
 - */
 + *
 function mangle(text) {
 let out = '',
-@@ -439,5 +439,5 @@ module.exports = class Lexer {
+@@ -465,5 +465,5 @@ module.exports = class Lexer {
 
 // autolink
 - if (token = this.tokenizer.autolink(src, mangle)) {
 + if (token = this.tokenizer.autolink(src)) {
 src = src.substring(token.raw.length);
 tokens.push(token);
-@@ -446,5 +446,5 @@ module.exports = class Lexer {
+@@ -472,5 +472,5 @@ module.exports = class Lexer {
 
 // url (gfm)
-- if (!inLink && (token = this.tokenizer.url(src, mangle))) {
+- if (!this.state.inLink && (token = this.tokenizer.url(src, mangle))) {
-+ if (!inLink && (token = this.tokenizer.url(src))) {
++ if (!this.state.inLink && (token = this.tokenizer.url(src))) {
 src = src.substring(token.raw.length);
 tokens.push(token);
-@@ -453,5 +453,5 @@ module.exports = class Lexer {
+@@ -493,5 +493,5 @@ module.exports = class Lexer {
-
+}
-// text
+}
-- if (token = this.tokenizer.inlineText(src, inRawBlock, smartypants)) {
+- if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
-+ if (token = this.tokenizer.inlineText(src, inRawBlock)) {
++ if (token = this.tokenizer.inlineText(cutSrc)) {
 src = src.substring(token.raw.length);
-tokens.push(token);
+this.ln = token.ln || this.ln;
 diff --git a/src/Renderer.js b/src/Renderer.js
 --- a/src/Renderer.js
 +++ b/src/Renderer.js
-@@ -140,5 +140,5 @@ module.exports = class Renderer {
+@@ -142,5 +142,5 @@ module.exports = class Renderer {
 
 link(href, title, text) {
 - href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
 + href = cleanUrl(this.options.baseUrl, href);
 if (href === null) {
 return text;
-@@ -153,5 +153,5 @@ module.exports = class Renderer {
+@@ -155,5 +155,5 @@ module.exports = class Renderer {
 
 image(href, title, text) {
 - href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
@@ -56,22 +56,23 @@ diff --git a/src/Renderer.js b/src/Renderer.js
 diff --git a/src/Tokenizer.js b/src/Tokenizer.js
 --- a/src/Tokenizer.js
 +++ b/src/Tokenizer.js
-@@ -287,11 +287,8 @@ module.exports = class Tokenizer {
+@@ -321,14 +321,7 @@ module.exports = class Tokenizer {
-if (cap) {
+type: 'html',
-return {
-- type: this.options.sanitize
-- ? 'paragraph'
-- : 'html',
-+ type: 'html',
 raw: cap[0],
 - pre: !this.options.sanitizer
 - && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
-- text: this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0]
++ pre: (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
-+ pre: cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style',
+text: cap[0]
-+ text: cap[0]
 };
+- if (this.options.sanitize) {
+- token.type = 'paragraph';
+- token.text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
+- token.tokens = [];
+- this.lexer.inline(token.text, token.tokens);
+- }
+return token;
 }
-@@ -421,15 +418,9 @@ module.exports = class Tokenizer {
+@@ -477,15 +470,9 @@ module.exports = class Tokenizer {
 
 return {
 - type: this.options.sanitize
@@ -79,8 +80,8 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
 - : 'html',
 + type: 'html',
 raw: cap[0],
-inLink,
+inLink: this.lexer.state.inLink,
-inRawBlock,
+inRawBlock: this.lexer.state.inRawBlock,
 - text: this.options.sanitize
 - ? (this.options.sanitizer
 - ? this.options.sanitizer(cap[0])
@@ -89,7 +90,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
 + text: cap[0]
 };
 }
-@@ -550,10 +541,10 @@ module.exports = class Tokenizer {
+@@ -672,10 +659,10 @@ module.exports = class Tokenizer {
 }
 
 - autolink(src, mangle) {
@@ -102,7 +103,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
 + text = escape(cap[1]);
 href = 'mailto:' + text;
 } else {
-@@ -578,10 +569,10 @@ module.exports = class Tokenizer {
+@@ -700,10 +687,10 @@ module.exports = class Tokenizer {
 }
 
 - url(src, mangle) {
@@ -115,15 +116,15 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
 + text = escape(cap[0]);
 href = 'mailto:' + text;
 } else {
-@@ -615,12 +606,12 @@ module.exports = class Tokenizer {
+@@ -737,12 +724,12 @@ module.exports = class Tokenizer {
 }
 
-- inlineText(src, inRawBlock, smartypants) {
+- inlineText(src, smartypants) {
-+ inlineText(src, inRawBlock) {
++ inlineText(src) {
 const cap = this.rules.inline.text.exec(src);
 if (cap) {
 let text;
-if (inRawBlock) {
+if (this.lexer.state.inRawBlock) {
 - text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
 + text = cap[0];
 } else {
@@ -134,7 +135,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
 diff --git a/src/defaults.js b/src/defaults.js
 --- a/src/defaults.js
 +++ b/src/defaults.js
-@@ -8,12 +8,8 @@ function getDefaults() {
+@@ -9,12 +9,8 @@ function getDefaults() {
 highlight: null,
 langPrefix: 'language-',
 - mangle: true,
|
@ -170,7 +171,7 @@ diff --git a/src/helpers.js b/src/helpers.js
|
||||||
+function cleanUrl(base, href) {
|
+function cleanUrl(base, href) {
|
||||||
if (base && !originIndependentUrl.test(href)) {
|
if (base && !originIndependentUrl.test(href)) {
|
||||||
href = resolveUrl(base, href);
|
href = resolveUrl(base, href);
|
||||||
@@ -223,10 +210,4 @@ function findClosingBracket(str, b) {
|
@@ -227,10 +214,4 @@ function findClosingBracket(str, b) {
|
||||||
}
|
}
|
||||||
|
|
||||||
-function checkSanitizeDeprecation(opt) {
|
-function checkSanitizeDeprecation(opt) {
|
||||||
|
@ -179,14 +180,13 @@ diff --git a/src/helpers.js b/src/helpers.js
|
||||||
- }
|
- }
|
||||||
-}
|
-}
|
||||||
-
|
-
|
||||||
module.exports = {
|
// copied from https://stackoverflow.com/a/5450113/806777
|
||||||
escape,
|
function repeatString(pattern, count) {
|
||||||
@@ -239,5 +220,4 @@ module.exports = {
|
@@ -260,5 +241,4 @@ module.exports = {
|
||||||
splitCells,
|
|
||||||
rtrim,
|
rtrim,
|
||||||
- findClosingBracket,
|
findClosingBracket,
|
||||||
- checkSanitizeDeprecation
|
- checkSanitizeDeprecation,
|
||||||
+ findClosingBracket
|
repeatString
|
||||||
};
|
};
|
||||||
diff --git a/src/marked.js b/src/marked.js
|
diff --git a/src/marked.js b/src/marked.js
|
||||||
--- a/src/marked.js
|
--- a/src/marked.js
|
||||||
|
@@ -203,8 +203,14 @@ diff --git a/src/marked.js b/src/marked.js
 - checkSanitizeDeprecation(opt);
 
 if (callback) {
-@@ -108,5 +106,5 @@ function marked(src, opt, callback) {
+@@ -302,5 +300,4 @@ marked.parseInline = function(src, opt) {
-return Parser.parse(tokens, opt);
+
+opt = merge({}, marked.defaults, opt || {});
+- checkSanitizeDeprecation(opt);
+
+try {
+@@ -311,5 +308,5 @@ marked.parseInline = function(src, opt) {
+return Parser.parseInline(tokens, opt);
 } catch (e) {
 - e.message += '\nPlease report this to https://github.com/markedjs/marked.';
 + e.message += '\nmake issue @ https://github.com/9001/copyparty';
@@ -252,86 +258,87 @@ diff --git a/test/bench.js b/test/bench.js
 diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
 --- a/test/specs/run-spec.js
 +++ b/test/specs/run-spec.js
-@@ -22,8 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
+@@ -22,9 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
 }
 
 - if (spec.options.sanitizer) {
 - // eslint-disable-next-line no-eval
 - spec.options.sanitizer = eval(spec.options.sanitizer);
 - }
+-
 (spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
-@@ -53,3 +49,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
+const before = process.hrtime();
+@@ -53,3 +48,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
 runSpecs('New', './new');
 runSpecs('ReDOS', './redos');
 -runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning
 diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
 --- a/test/unit/Lexer-spec.js
 +++ b/test/unit/Lexer-spec.js
-@@ -465,5 +465,5 @@ a | b
+@@ -589,5 +589,5 @@ paragraph
 });
 
 - it('sanitize', () => {
 + /*it('sanitize', () => {
 expectTokens({
 md: '<div>html</div>',
-@@ -483,5 +483,5 @@ a | b
+@@ -607,5 +607,5 @@ paragraph
 ]
 });
 - });
 + });*/
 });
 
-@@ -587,5 +587,5 @@ a | b
+@@ -652,5 +652,5 @@ paragraph
 });
 
 - it('html sanitize', () => {
 + /*it('html sanitize', () => {
 expectInlineTokens({
 md: '<div>html</div>',
-@@ -597,5 +597,5 @@ a | b
+@@ -660,5 +660,5 @@ paragraph
 ]
 });
 - });
 + });*/
 
 it('link', () => {
-@@ -909,5 +909,5 @@ a | b
+@@ -971,5 +971,5 @@ paragraph
 });
 
 - it('autolink mangle email', () => {
 + /*it('autolink mangle email', () => {
 expectInlineTokens({
 md: '<test@example.com>',
-@@ -929,5 +929,5 @@ a | b
+@@ -991,5 +991,5 @@ paragraph
 ]
 });
 - });
 + });*/
 
 it('url', () => {
-@@ -966,5 +966,5 @@ a | b
+@@ -1028,5 +1028,5 @@ paragraph
 });
 
 - it('url mangle email', () => {
 + /*it('url mangle email', () => {
 expectInlineTokens({
 md: 'test@example.com',
-@@ -986,5 +986,5 @@ a | b
+@@ -1048,5 +1048,5 @@ paragraph
 ]
 });
 - });
 + });*/
 });
 
-@@ -1002,5 +1002,5 @@ a | b
+@@ -1064,5 +1064,5 @@ paragraph
 });
 
 - describe('smartypants', () => {
 + /*describe('smartypants', () => {
 it('single quotes', () => {
 expectInlineTokens({
-@@ -1072,5 +1072,5 @@ a | b
+@@ -1134,5 +1134,5 @@ paragraph
 });
 });
 - });
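Note: the second patch strips marked's sanitize/sanitizer, mangle and smartypants code paths, so the HTML tokenizer now always emits type 'html' with the raw text, and any escaping or sanitizing has to happen in the code that calls marked. A short illustration (not part of the commit; output shown only approximately):

// illustration only: with the sanitize branches removed,
// inline HTML passes straight through the renderer
var html = marked('<em onclick="x()">hi</em>');
// html is roughly '<p data-ln="1"><em onclick="x()">hi</em></p>\n'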
@@ -140,12 +140,12 @@ a newline toplevel
 | a table | on the right |
 | second row | foo bar |
 
-||
+a||a
 --|:-:|-:
 a table | big text in this | aaakbfddd
 second row | centred | bbb
 
-||
+||||
 --|--|--
 foo
 