diff --git a/src/Lexer.js b/src/Lexer.js
strip some features
--- a/src/Lexer.js
+++ b/src/Lexer.js
@@ -7,5 +7,5 @@ import { repeatString } from './helpers.js';
  * smartypants text replacement
  * @param {string} text
- */
+ *
 function smartypants(text) {
   return text
@@ -29,5 +29,5 @@ function smartypants(text) {
  * mangle email addresses
  * @param {string} text
- */
+ *
 function mangle(text) {
   let out = '',
@@ -478,5 +478,5 @@ export class Lexer {
 
       // autolink
-      if (token = this.tokenizer.autolink(src, mangle)) {
+      if (token = this.tokenizer.autolink(src)) {
         src = src.substring(token.raw.length);
         tokens.push(token);
@@ -485,5 +485,5 @@ export class Lexer {
 
       // url (gfm)
-      if (!this.state.inLink && (token = this.tokenizer.url(src, mangle))) {
+      if (!this.state.inLink && (token = this.tokenizer.url(src))) {
         src = src.substring(token.raw.length);
         tokens.push(token);
@@ -506,5 +506,5 @@ export class Lexer {
         }
       }
-      if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
+      if (token = this.tokenizer.inlineText(cutSrc)) {
         src = src.substring(token.raw.length);
         this.ln = token.ln || this.ln;
diff --git a/src/Renderer.js b/src/Renderer.js
--- a/src/Renderer.js
+++ b/src/Renderer.js
@@ -173,5 +173,5 @@ export class Renderer {
    */
   link(href, title, text) {
-    href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
+    href = cleanUrl(this.options.baseUrl, href);
     if (href === null) {
       return text;
@@ -191,5 +191,5 @@ export class Renderer {
    */
   image(href, title, text) {
-    href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
+    href = cleanUrl(this.options.baseUrl, href);
     if (href === null) {
       return text;
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
--- a/src/Tokenizer.js
+++ b/src/Tokenizer.js
@@ -367,14 +367,7 @@ export class Tokenizer {
         type: 'html',
         raw: cap[0],
-        pre: !this.options.sanitizer
-          && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
+        pre: (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
         text: cap[0]
       };
-      if (this.options.sanitize) {
-        const text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
-        token.type = 'paragraph';
-        token.text = text;
-        token.tokens = this.lexer.inline(text);
-      }
       return token;
     }
@@ -517,15 +510,9 @@ export class Tokenizer {
 
       return {
-        type: this.options.sanitize
-          ? 'text'
-          : 'html',
+        type: 'html',
         raw: cap[0],
         inLink: this.lexer.state.inLink,
         inRawBlock: this.lexer.state.inRawBlock,
-        text: this.options.sanitize
-          ? (this.options.sanitizer
-            ? this.options.sanitizer(cap[0])
-            : escape(cap[0]))
-          : cap[0]
+        text: cap[0]
       };
     }
@@ -714,10 +701,10 @@ export class Tokenizer {
   }
 
-  autolink(src, mangle) {
+  autolink(src) {
     const cap = this.rules.inline.autolink.exec(src);
     if (cap) {
       let text, href;
       if (cap[2] === '@') {
-        text = escape(this.options.mangle ? mangle(cap[1]) : cap[1]);
+        text = escape(cap[1]);
         href = 'mailto:' + text;
       } else {
@@ -742,10 +729,10 @@ export class Tokenizer {
   }
 
-  url(src, mangle) {
+  url(src) {
     let cap;
     if (cap = this.rules.inline.url.exec(src)) {
       let text, href;
       if (cap[2] === '@') {
-        text = escape(this.options.mangle ? mangle(cap[0]) : cap[0]);
+        text = escape(cap[0]);
         href = 'mailto:' + text;
       } else {
@@ -779,12 +766,12 @@ export class Tokenizer {
   }
 
-  inlineText(src, smartypants) {
+  inlineText(src) {
     const cap = this.rules.inline.text.exec(src);
     if (cap) {
       let text;
       if (this.lexer.state.inRawBlock) {
-        text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
+        text = cap[0];
       } else {
-        text = escape(this.options.smartypants ? smartypants(cap[0]) : cap[0]);
+        text = escape(cap[0]);
       }
       return {
diff --git a/src/defaults.js b/src/defaults.js
--- a/src/defaults.js
+++ b/src/defaults.js
@@ -11,11 +11,7 @@ export function getDefaults() {
     hooks: null,
     langPrefix: 'language-',
-    mangle: true,
     pedantic: false,
     renderer: null,
-    sanitize: false,
-    sanitizer: null,
     silent: false,
-    smartypants: false,
     tokenizer: null,
     walkTokens: null,
diff --git a/src/helpers.js b/src/helpers.js
--- a/src/helpers.js
+++ b/src/helpers.js
@@ -78,18 +78,5 @@ const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;
  * @param {string} href
  */
-export function cleanUrl(sanitize, base, href) {
-  if (sanitize) {
-    let prot;
-    try {
-      prot = decodeURIComponent(unescape(href))
-        .replace(nonWordAndColonTest, '')
-        .toLowerCase();
-    } catch (e) {
-      return null;
-    }
-    if (prot.indexOf('javascript:') === 0 || prot.indexOf('vbscript:') === 0 || prot.indexOf('data:') === 0) {
-      return null;
-    }
-  }
+export function cleanUrl(base, href) {
   if (base && !originIndependentUrl.test(href)) {
     href = resolveUrl(base, href);
@@ -233,10 +220,4 @@ export function findClosingBracket(str, b) {
 }
 
-export function checkSanitizeDeprecation(opt) {
-  if (opt && opt.sanitize && !opt.silent) {
-    console.warn('marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options');
-  }
-}
-
 // copied from https://stackoverflow.com/a/5450113/806777
 /**
diff --git a/src/marked.js b/src/marked.js
--- a/src/marked.js
+++ b/src/marked.js
@@ -7,5 +7,4 @@ import { Slugger } from './Slugger.js';
 import { Hooks } from './Hooks.js';
 import {
-  checkSanitizeDeprecation,
   escape
 } from './helpers.js';
@@ -18,5 +17,5 @@ import {
 function onError(silent, async, callback) {
   return (e) => {
-    e.message += '\nPlease report this to https://github.com/markedjs/marked.';
+    e.message += '\nmake issue @ https://github.com/9001/copyparty';
 
     if (silent) {
@@ -65,6 +64,4 @@ function parseMarkdown(lexer, parser) {
     }
 
-    checkSanitizeDeprecation(opt);
-
     if (opt.hooks) {
       opt.hooks.options = opt;
diff --git a/test/bench.js b/test/bench.js
--- a/test/bench.js
+++ b/test/bench.js
@@ -39,5 +39,4 @@ export async function runBench(options) {
     breaks: false,
     pedantic: false,
-    sanitize: false
   });
   if (options.marked) {
@@ -50,5 +49,4 @@ export async function runBench(options) {
     breaks: false,
     pedantic: false,
-    sanitize: false
   });
   if (options.marked) {
@@ -61,5 +59,4 @@ export async function runBench(options) {
   //   breaks: false,
   //   pedantic: false,
-  //   sanitize: false
   // });
   // if (options.marked) {
diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
--- a/test/specs/run-spec.js
+++ b/test/specs/run-spec.js
@@ -25,9 +25,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
           }
 
-          if (spec.options.sanitizer) {
-            // eslint-disable-next-line no-eval
-            spec.options.sanitizer = eval(spec.options.sanitizer);
-          }
-
           (spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
             const before = process.hrtime();
@@ -56,3 +51,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
 runSpecs('New', './new');
 runSpecs('ReDOS', './redos');
-runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
--- a/test/unit/Lexer-spec.js
+++ b/test/unit/Lexer-spec.js
@@ -794,5 +794,5 @@ paragraph
     });
 
-    it('sanitize', () => {
+    /*it('sanitize', () => {
       expectTokens({
         md: '<div>html</div>',
@@ -812,5 +812,5 @@ paragraph
         ]
       });
-    });
+    });*/
   });
 
@@ -892,5 +892,5 @@ paragraph
       });
 
-      it('html sanitize', () => {
+      /*it('html sanitize', () => {
        expectInlineTokens({
           md: '<div>html</div>',
@@ -900,5 +900,5 @@ paragraph
           ]
         });
-      });
+      });*/
 
       it('link', () => {
@@ -1211,5 +1211,5 @@ paragraph
         });
 
-        it('autolink mangle email', () => {
+        /*it('autolink mangle email', () => {
           expectInlineTokens({
             md: '<test@example.com>',
@@ -1231,5 +1231,5 @@ paragraph
             ]
           });
-        });
+        });*/
 
         it('url', () => {
@@ -1268,5 +1268,5 @@ paragraph
         });
 
-        it('url mangle email', () => {
+        /*it('url mangle email', () => {
           expectInlineTokens({
             md: 'test@example.com',
@@ -1288,5 +1288,5 @@ paragraph
             ]
           });
-        });
+        });*/
       });
 
@@ -1304,5 +1304,5 @@ paragraph
       });
 
-      describe('smartypants', () => {
+      /*describe('smartypants', () => {
         it('single quotes', () => {
           expectInlineTokens({
@@ -1374,5 +1374,5 @@ paragraph
           });
         });
-      });
+      });*/
     });
   });
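
Note on usage: this patch removes the sanitize/sanitizer options, the mangle and smartypants helpers, and the javascript:/vbscript:/data: protocol check in cleanUrl, so the patched marked returns its HTML output unsanitized and any cleanup must happen downstream. Below is a minimal sketch of what a caller might look like; the import paths, the use of DOMPurify as the downstream sanitizer, and the element id are assumptions for illustration, not part of this patch.

import { marked } from './src/marked.js';  // the patched build; path is an assumption
import DOMPurify from 'dompurify';         // assumed downstream sanitizer, not provided by this patch

// the removed options (sanitize, sanitizer, smartypants, mangle) are simply gone;
// the remaining options behave as in upstream marked
const html = marked.parse('# hello <script>alert(1)</script>');

// sanitization now has to happen entirely outside of marked
document.getElementById('doc').innerHTML = DOMPurify.sanitize(html);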