Updated the files.

This commit is contained in:
Batuhan Berk Başoğlu 2024-02-08 19:38:41 -05:00
parent 1553e6b971
commit 753967d4f5
23418 changed files with 3784666 additions and 0 deletions

View file

@ -0,0 +1,6 @@
/// <reference types="node" />
import { Writable } from 'node:stream';
/**
 * Minimal `Writable` sink that discards every chunk written to it
 * (the streaming equivalent of piping to /dev/null).
 */
export declare class DevNullStream extends Writable {
    _write(_chunk: string, _encoding: string, cb: () => void): void;
}
//# sourceMappingURL=dev-null-stream.d.ts.map

View file

@ -0,0 +1,7 @@
import { Writable } from 'node:stream';
/**
 * Writable sink that throws away all incoming data, like /dev/null.
 * The SAX parser pipes itself into one of these so its internal buffer
 * never hits `highWaterMark` even when nobody consumes the output.
 */
export class DevNullStream extends Writable {
    // Discard the chunk and immediately signal completion.
    _write(_chunk, _encoding, done) {
        done();
    }
}
//# sourceMappingURL=dev-null-stream.js.map

154
my-app/node_modules/parse5-sax-parser/dist/index.d.ts generated vendored Executable file
View file

@ -0,0 +1,154 @@
/// <reference types="node" />
import { Transform } from 'node:stream';
import type { Tokenizer, TokenHandler, Token } from 'parse5';
import { ParserFeedbackSimulator } from './parser-feedback-simulator.js';
/**
 * Options accepted by {@link SAXParser}.
 */
export interface SAXParserOptions {
    /**
     * Enables source code location information for tokens.
     *
     * When enabled, each token will have a `sourceCodeLocation` property.
     *
     * @default false
     */
    sourceCodeLocationInfo?: boolean;
}
/**
* Streaming [SAX](https://en.wikipedia.org/wiki/Simple_API_for_XML)-style HTML parser.
* A [transform stream](https://nodejs.org/api/stream.html#stream_class_stream_transform) (which means you can pipe _through_ it, see example).
*
* @example
*
* ```js
* const SAXParser = require('parse5-sax-parser');
* const http = require('http');
* const fs = require('fs');
*
* const file = fs.createWriteStream('/home/google.com.html');
* const parser = new SAXParser();
*
* parser.on('text', text => {
* // Handle page text content
* ...
* });
*
* http.get('http://google.com', res => {
* // `SAXParser` is the `Transform` stream, which means you can pipe
* // through it. So, you can analyze the page content and, e.g., save it
* // to the file at the same time:
* res.pipe(parser).pipe(file);
* });
* ```
*/
export declare class SAXParser extends Transform implements TokenHandler {
    protected options: SAXParserOptions;
    /** Simulates tree-construction feedback; owns the underlying tokenizer. */
    protected parserFeedbackSimulator: ParserFeedbackSimulator;
    /** Buffered run of adjacent character tokens, emitted as a single 'text' event. */
    private pendingText;
    /** Set once `_final()` has run; tells the tokenizer the next chunk is the last. */
    private lastChunkWritten;
    /** True after `stop()` or EOF — later chunks pass through untokenized. */
    private stopped;
    protected tokenizer: Tokenizer;
    /**
     * @param options Parsing options.
     */
    constructor(options?: SAXParserOptions);
    /** `Transform` implementation: tokenizes the string chunk and passes it through unchanged. */
    _transform(chunk: string, _encoding: string, callback: (error?: Error | null, data?: string) => void): void;
    /** Flushes the tokenizer by writing an empty, final chunk. */
    _final(callback: (error?: Error | null, data?: string) => void): void;
    /**
     * Stops parsing. Useful if you want the parser to stop consuming CPU time
     * once you've obtained the desired info from the input stream. Doesn't
     * prevent piping, so that data will flow through the parser as usual.
     *
     * @example
     *
     * ```js
     * const SAXParser = require('parse5-sax-parser');
     * const http = require('http');
     * const fs = require('fs');
     *
     * const file = fs.createWriteStream('google.com.html');
     * const parser = new SAXParser();
     *
     * parser.on('doctype', ({ name, publicId, systemId }) => {
     *     // Process doctype info and stop parsing
     *     ...
     *     parser.stop();
     * });
     *
     * http.get('http://google.com', res => {
     *     // Despite the fact that parser.stop() was called whole
     *     // content of the page will be written to the file
     *     res.pipe(parser).pipe(file);
     * });
     * ```
     */
    stop(): void;
    /** Feeds the chunk to the tokenizer (unless stopped) and returns it unchanged. */
    protected _transformChunk(chunk: string): string;
    /** @internal */
    onCharacter({ chars, location }: Token.CharacterToken): void;
    /** @internal */
    onWhitespaceCharacter(token: Token.CharacterToken): void;
    /** @internal */
    onNullCharacter(token: Token.CharacterToken): void;
    /** @internal */
    onEof(): void;
    /** @internal */
    onStartTag(token: Token.TagToken): void;
    /** @internal */
    onEndTag(token: Token.TagToken): void;
    /** @internal */
    onDoctype(token: Token.DoctypeToken): void;
    /** @internal */
    onComment(token: Token.CommentToken): void;
    /** Emits `eventName` only when a listener is attached; returns whether it was emitted. */
    protected emitIfListenerExists(eventName: string, token: SaxToken): boolean;
    protected _emitToken(eventName: string, token: SaxToken): void;
    /** Flushes any buffered character tokens as a single 'text' event. */
    private _emitPendingText;
}
/** Fields common to every token emitted by {@link SAXParser}. */
export interface SaxToken {
    /** Source code location info. Available if location info is enabled via {@link SAXParserOptions}. */
    sourceCodeLocation?: Token.Location | null;
}
/** Token passed to 'startTag' event listeners. */
export interface StartTag extends SaxToken {
    /** Tag name */
    tagName: string;
    /** List of attributes */
    attrs: Token.Attribute[];
    /** Indicates if the tag is self-closing */
    selfClosing: boolean;
}
/** Token passed to 'endTag' event listeners. */
export interface EndTag extends SaxToken {
    /** Tag name */
    tagName: string;
}
/** Token passed to 'text' event listeners. */
export interface Text extends SaxToken {
    /** Text content. */
    text: string;
}
/** Token passed to 'comment' event listeners. */
export interface Comment extends SaxToken {
    /** Comment text. */
    text: string;
}
/** Token passed to 'doctype' event listeners. */
export interface Doctype extends SaxToken {
    /** Document type name. */
    name: string | null;
    /** Document type public identifier. */
    publicId: string | null;
    /** Document type system identifier. */
    systemId: string | null;
}
/**
 * Typed `on()` overloads for the events emitted by {@link SAXParser}
 * (merged into the class via declaration merging).
 */
export interface SAXParser {
    /** Raised when the parser encounters a start tag. */
    on(event: 'startTag', listener: (startTag: StartTag) => void): this;
    /** Raised when the parser encounters an end tag. */
    on(event: 'endTag', listener: (endTag: EndTag) => void): this;
    /** Raised when the parser encounters a comment. */
    on(event: 'comment', listener: (comment: Comment) => void): this;
    /** Raised when the parser encounters text content. */
    on(event: 'text', listener: (text: Text) => void): this;
    /** Raised when the parser encounters a [document type declaration](https://en.wikipedia.org/wiki/Document_type_declaration) */
    on(event: 'doctype', listener: (doctype: Doctype) => void): this;
    /**
     * Base event handler.
     *
     * @param event Name of the event
     * @param handler Event handler
     */
    on(event: string, handler: (...args: any[]) => void): this;
}
//# sourceMappingURL=index.d.ts.map

192
my-app/node_modules/parse5-sax-parser/dist/index.js generated vendored Executable file
View file

@ -0,0 +1,192 @@
import { Transform } from 'node:stream';
import { DevNullStream } from './dev-null-stream.js';
import { ParserFeedbackSimulator } from './parser-feedback-simulator.js';
/**
* Streaming [SAX](https://en.wikipedia.org/wiki/Simple_API_for_XML)-style HTML parser.
* A [transform stream](https://nodejs.org/api/stream.html#stream_class_stream_transform) (which means you can pipe _through_ it, see example).
*
* @example
*
* ```js
* const SAXParser = require('parse5-sax-parser');
* const http = require('http');
* const fs = require('fs');
*
* const file = fs.createWriteStream('/home/google.com.html');
* const parser = new SAXParser();
*
* parser.on('text', text => {
* // Handle page text content
* ...
* });
*
* http.get('http://google.com', res => {
* // `SAXParser` is the `Transform` stream, which means you can pipe
* // through it. So, you can analyze the page content and, e.g., save it
* // to the file at the same time:
* res.pipe(parser).pipe(file);
* });
* ```
*/
export class SAXParser extends Transform {
    /**
     * @param options Parsing options.
     */
    constructor(options = {}) {
        // Operate on strings end-to-end: don't decode incoming chunks into
        // Buffers, and emit utf8 strings downstream.
        super({ encoding: 'utf8', decodeStrings: false });
        // Buffered run of adjacent character tokens, flushed as one 'text' event.
        this.pendingText = null;
        // Set by _final() so the tokenizer knows the next chunk is the last one.
        this.lastChunkWritten = false;
        // Once true (stop() or EOF), chunks are no longer fed to the tokenizer.
        this.stopped = false;
        this.options = {
            sourceCodeLocationInfo: false,
            ...options,
        };
        // The feedback simulator owns the tokenizer and calls back into this
        // instance (TokenHandler implementation) with the adjusted tokens.
        this.parserFeedbackSimulator = new ParserFeedbackSimulator(this.options, this);
        this.tokenizer = this.parserFeedbackSimulator.tokenizer;
        // NOTE: always pipe the stream to the /dev/null stream to avoid
        // the `highWaterMark` to be hit even if we don't have consumers.
        // (see: https://github.com/inikulin/parse5/issues/97#issuecomment-171940774)
        this.pipe(new DevNullStream());
    }
    //`Transform` implementation
    _transform(chunk, _encoding, callback) {
        if (typeof chunk !== 'string') {
            throw new TypeError('Parser can work only with string streams.');
        }
        callback(null, this._transformChunk(chunk));
    }
    _final(callback) {
        this.lastChunkWritten = true;
        // Flush the tokenizer by writing an empty, final chunk.
        callback(null, this._transformChunk(''));
    }
    /**
     * Stops parsing. Useful if you want the parser to stop consuming CPU time
     * once you've obtained the desired info from the input stream. Doesn't
     * prevent piping, so that data will flow through the parser as usual.
     *
     * @example
     *
     * ```js
     * const SAXParser = require('parse5-sax-parser');
     * const http = require('http');
     * const fs = require('fs');
     *
     * const file = fs.createWriteStream('google.com.html');
     * const parser = new SAXParser();
     *
     * parser.on('doctype', ({ name, publicId, systemId }) => {
     *     // Process doctype info and stop parsing
     *     ...
     *     parser.stop();
     * });
     *
     * http.get('http://google.com', res => {
     *     // Despite the fact that parser.stop() was called whole
     *     // content of the page will be written to the file
     *     res.pipe(parser).pipe(file);
     * });
     * ```
     */
    stop() {
        this.stopped = true;
        this.tokenizer.pause();
    }
    //Internals
    _transformChunk(chunk) {
        if (!this.stopped) {
            this.tokenizer.write(chunk, this.lastChunkWritten);
        }
        // The chunk is returned unchanged so piping through the parser works.
        return chunk;
    }
    /** @internal */
    onCharacter({ chars, location }) {
        if (this.pendingText === null) {
            this.pendingText = { text: chars, sourceCodeLocation: location };
        }
        else {
            // Merge adjacent character tokens into one pending 'text' token.
            this.pendingText.text += chars;
            if (location && this.pendingText.sourceCodeLocation) {
                // Extend the buffered location to the end of this token.
                const { endLine, endCol, endOffset } = location;
                this.pendingText.sourceCodeLocation = {
                    ...this.pendingText.sourceCodeLocation,
                    endLine,
                    endCol,
                    endOffset,
                };
            }
        }
        // Flush before the preprocessor drops already-parsed input that the
        // pending token may still reference.
        if (this.tokenizer.preprocessor.willDropParsedChunk()) {
            this._emitPendingText();
        }
    }
    /** @internal */
    onWhitespaceCharacter(token) {
        this.onCharacter(token);
    }
    /** @internal */
    onNullCharacter(token) {
        this.onCharacter(token);
    }
    /** @internal */
    onEof() {
        this._emitPendingText();
        this.stopped = true;
    }
    /** @internal */
    onStartTag(token) {
        this._emitPendingText();
        const startTag = {
            tagName: token.tagName,
            attrs: token.attrs,
            selfClosing: token.selfClosing,
            sourceCodeLocation: token.location,
        };
        this.emitIfListenerExists('startTag', startTag);
    }
    /** @internal */
    onEndTag(token) {
        this._emitPendingText();
        const endTag = {
            tagName: token.tagName,
            sourceCodeLocation: token.location,
        };
        this.emitIfListenerExists('endTag', endTag);
    }
    /** @internal */
    onDoctype(token) {
        this._emitPendingText();
        const doctype = {
            name: token.name,
            publicId: token.publicId,
            systemId: token.systemId,
            sourceCodeLocation: token.location,
        };
        this.emitIfListenerExists('doctype', doctype);
    }
    /** @internal */
    onComment(token) {
        this._emitPendingText();
        const comment = {
            text: token.data,
            sourceCodeLocation: token.location,
        };
        this.emitIfListenerExists('comment', comment);
    }
    // Emits `eventName` only when a listener is attached; reports whether
    // the token was actually emitted.
    emitIfListenerExists(eventName, token) {
        if (this.listenerCount(eventName) === 0) {
            return false;
        }
        this._emitToken(eventName, token);
        return true;
    }
    _emitToken(eventName, token) {
        this.emit(eventName, token);
    }
    // Flushes the buffered character tokens as a single 'text' event.
    _emitPendingText() {
        if (this.pendingText !== null) {
            this.emitIfListenerExists('text', this.pendingText);
            this.pendingText = null;
        }
    }
}
//# sourceMappingURL=index.js.map

View file

@ -0,0 +1,32 @@
import { Tokenizer, type TokenizerOptions, type TokenHandler, Token } from 'parse5';
/**
* Simulates adjustments of the Tokenizer which are performed by the standard parser during tree construction.
*/
/**
 * Simulates adjustments of the Tokenizer which are performed by the standard parser during tree construction.
 */
export declare class ParserFeedbackSimulator implements TokenHandler {
    /** Downstream handler that receives the (possibly adjusted) tokens. */
    private handler;
    /** Stack of entered namespaces; index 0 is the current namespace. */
    private namespaceStack;
    /** True while the current namespace is not HTML (i.e. SVG/MathML content). */
    inForeignContent: boolean;
    /** When set, a leading line feed of the next whitespace token is dropped. */
    skipNextNewLine: boolean;
    tokenizer: Tokenizer;
    constructor(options: TokenizerOptions, handler: TokenHandler);
    /** @internal */
    onNullCharacter(token: Token.CharacterToken): void;
    /** @internal */
    onWhitespaceCharacter(token: Token.CharacterToken): void;
    /** @internal */
    onCharacter(token: Token.CharacterToken): void;
    /** @internal */
    onComment(token: Token.CommentToken): void;
    /** @internal */
    onDoctype(token: Token.DoctypeToken): void;
    /** @internal */
    onEof(token: Token.EOFToken): void;
    private _enterNamespace;
    private _leaveCurrentNamespace;
    /** Switches the tokenizer into RCDATA/RAWTEXT/PLAINTEXT/script-data mode as needed. */
    private _ensureTokenizerMode;
    /** @internal */
    onStartTag(token: Token.TagToken): void;
    /** @internal */
    onEndTag(token: Token.TagToken): void;
}
//# sourceMappingURL=parser-feedback-simulator.d.ts.map

View file

@ -0,0 +1,184 @@
import { Tokenizer, TokenizerMode, Token, foreignContent, html, } from 'parse5';
// Shorthand for the numeric tag-ID table used by the switches below.
const $ = html.TAG_ID;
// U+FFFD, substituted for NULL characters while in foreign (SVG/MathML) content.
const REPLACEMENT_CHARACTER = '\uFFFD';
// '\n' — used to detect the newline skipped after <pre>/<textarea>/<listing>.
const LINE_FEED_CODE_POINT = 0x0a;
/**
* Simulates adjustments of the Tokenizer which are performed by the standard parser during tree construction.
*/
export class ParserFeedbackSimulator {
    /**
     * @param options Tokenizer options, forwarded verbatim.
     * @param handler Downstream handler receiving the adjusted tokens.
     */
    constructor(options, handler) {
        this.handler = handler;
        // Stack of entered namespaces; index 0 is the current namespace.
        this.namespaceStack = [];
        this.inForeignContent = false;
        this.skipNextNewLine = false;
        this.tokenizer = new Tokenizer(options, this);
        this._enterNamespace(html.NS.HTML);
    }
    /** @internal */
    onNullCharacter(token) {
        this.skipNextNewLine = false;
        if (this.inForeignContent) {
            // In foreign (SVG/MathML) content, NULL is forwarded as a regular
            // character token carrying U+FFFD.
            this.handler.onCharacter({
                type: Token.TokenType.CHARACTER,
                chars: REPLACEMENT_CHARACTER,
                location: token.location,
            });
        }
        else {
            this.handler.onNullCharacter(token);
        }
    }
    /** @internal */
    onWhitespaceCharacter(token) {
        // Drop the line feed that immediately follows <pre>, <textarea> or
        // <listing> (skipNextNewLine is set in onStartTag).
        if (this.skipNextNewLine && token.chars.charCodeAt(0) === LINE_FEED_CODE_POINT) {
            this.skipNextNewLine = false;
            if (token.chars.length === 1) {
                return;
            }
            // `slice` replaces the deprecated Annex-B `substr`; for a start
            // index of 1 both return the identical substring.
            token.chars = token.chars.slice(1);
        }
        this.handler.onWhitespaceCharacter(token);
    }
    /** @internal */
    onCharacter(token) {
        this.skipNextNewLine = false;
        this.handler.onCharacter(token);
    }
    /** @internal */
    onComment(token) {
        this.skipNextNewLine = false;
        this.handler.onComment(token);
    }
    /** @internal */
    onDoctype(token) {
        this.skipNextNewLine = false;
        this.handler.onDoctype(token);
    }
    /** @internal */
    onEof(token) {
        this.skipNextNewLine = false;
        this.handler.onEof(token);
    }
    //Namespace stack mutations
    _enterNamespace(namespace) {
        this.namespaceStack.unshift(namespace);
        this.inForeignContent = namespace !== html.NS.HTML;
        this.tokenizer.inForeignNode = this.inForeignContent;
    }
    _leaveCurrentNamespace() {
        this.namespaceStack.shift();
        this.inForeignContent = this.namespaceStack[0] !== html.NS.HTML;
        this.tokenizer.inForeignNode = this.inForeignContent;
    }
    //Token handlers
    // Puts the tokenizer into the special parsing mode required by the given
    // tag (RCDATA, PLAINTEXT, script data or RAWTEXT).
    _ensureTokenizerMode(tn) {
        switch (tn) {
            case $.TEXTAREA:
            case $.TITLE: {
                this.tokenizer.state = TokenizerMode.RCDATA;
                break;
            }
            case $.PLAINTEXT: {
                this.tokenizer.state = TokenizerMode.PLAINTEXT;
                break;
            }
            case $.SCRIPT: {
                this.tokenizer.state = TokenizerMode.SCRIPT_DATA;
                break;
            }
            case $.STYLE:
            case $.IFRAME:
            case $.XMP:
            case $.NOEMBED:
            case $.NOFRAMES:
            case $.NOSCRIPT: {
                this.tokenizer.state = TokenizerMode.RAWTEXT;
                break;
            }
            default:
            // Do nothing
        }
    }
    /** @internal */
    onStartTag(token) {
        let tn = token.tagID;
        switch (tn) {
            case $.SVG: {
                this._enterNamespace(html.NS.SVG);
                break;
            }
            case $.MATH: {
                this._enterNamespace(html.NS.MATHML);
                break;
            }
            default:
            // Do nothing
        }
        if (this.inForeignContent) {
            if (foreignContent.causesExit(token)) {
                this._leaveCurrentNamespace();
            }
            else {
                const currentNs = this.namespaceStack[0];
                if (currentNs === html.NS.MATHML) {
                    foreignContent.adjustTokenMathMLAttrs(token);
                }
                else if (currentNs === html.NS.SVG) {
                    foreignContent.adjustTokenSVGTagName(token);
                    foreignContent.adjustTokenSVGAttrs(token);
                }
                foreignContent.adjustTokenXMLAttrs(token);
                // The tag ID may have changed after the SVG name adjustment.
                tn = token.tagID;
                if (!token.selfClosing && foreignContent.isIntegrationPoint(tn, currentNs, token.attrs)) {
                    this._enterNamespace(html.NS.HTML);
                }
            }
        }
        else {
            switch (tn) {
                case $.PRE:
                case $.TEXTAREA:
                case $.LISTING: {
                    // A newline right after these tags must be ignored.
                    this.skipNextNewLine = true;
                    break;
                }
                case $.IMAGE: {
                    // In HTML content, <image> is treated as <img>.
                    token.tagName = html.TAG_NAMES.IMG;
                    token.tagID = $.IMG;
                    break;
                }
                default:
                // Do nothing
            }
            this._ensureTokenizerMode(tn);
        }
        this.handler.onStartTag(token);
    }
    /** @internal */
    onEndTag(token) {
        let tn = token.tagID;
        if (!this.inForeignContent) {
            const previousNs = this.namespaceStack[1];
            if (previousNs === html.NS.SVG) {
                const adjustedTagName = foreignContent.SVG_TAG_NAMES_ADJUSTMENT_MAP.get(token.tagName);
                if (adjustedTagName) {
                    tn = html.getTagID(adjustedTagName);
                }
            }
            //NOTE: check for exit from integration point
            if (foreignContent.isIntegrationPoint(tn, previousNs, token.attrs)) {
                this._leaveCurrentNamespace();
            }
        }
        else if ((tn === $.SVG && this.namespaceStack[0] === html.NS.SVG) ||
            (tn === $.MATH && this.namespaceStack[0] === html.NS.MATHML)) {
            this._leaveCurrentNamespace();
        }
        // NOTE: adjust end tag name as well for consistency
        if (this.namespaceStack[0] === html.NS.SVG) {
            foreignContent.adjustTokenSVGTagName(token);
        }
        this.handler.onEndTag(token);
    }
}
//# sourceMappingURL=parser-feedback-simulator.js.map