Linux websever 5.15.0-153-generic #163-Ubuntu SMP Thu Aug 7 16:37:18 UTC 2025 x86_64
Apache/2.4.52 (Ubuntu)
Server IP: 192.168.3.70 | Your IP: 192.168.1.99
Can't read [ /etc/named.conf ]
PHP 8.1.2-1ubuntu2.23
urlab
www.github.com/MadExploits
Terminal
AUTO ROOT
Adminer
Backdoor Destroyer
Linux Exploit
Lock Shell
Lock File
Create User
CREATE RDP
PHP Mailer
BACKCONNECT
UNLOCK SHELL
HASH IDENTIFIER
CPANEL RESET
CREATE WP USER
README
/var/www/html/cqt/node_modules/yaml/dist/parse/
Name                  Size      Permission
cst-scalar.d.ts       3.45 KB   -rw-r--r--
cst-scalar.js         9.16 KB   -rw-r--r--
cst-stringify.d.ts    313 B     -rw-r--r--
cst-stringify.js      1.76 KB   -rw-r--r--
cst-visit.d.ts        2.21 KB   -rw-r--r--
cst-visit.js          3.89 KB   -rw-r--r--
cst.d.ts              3.39 KB   -rw-r--r--
cst.js                3.17 KB   -rw-r--r--
lexer.d.ts            2.86 KB   -rw-r--r--
lexer.js              23.51 KB  -rw-r--r--
line-counter.d.ts     733 B     -rw-r--r--
line-counter.js       1.41 KB   -rw-r--r--
parser.d.ts           2.49 KB   -rw-r--r--
parser.js             33.85 KB  -rw-r--r--
Code Editor : parser.d.ts
import { Token } from './cst.js';
/**
 * A YAML concrete syntax tree (CST) parser
 *
 * ```ts
 * const src: string = ...
 * for (const token of new Parser().parse(src)) {
 *   // token: Token
 * }
 * ```
 *
 * To use the parser with a user-provided lexer:
 *
 * ```ts
 * function* parse(source: string, lexer: Lexer) {
 *   const parser = new Parser()
 *   for (const lexeme of lexer.lex(source))
 *     yield* parser.next(lexeme)
 *   yield* parser.end()
 * }
 *
 * const src: string = ...
 * const lexer = new Lexer()
 * for (const token of parse(src, lexer)) {
 *   // token: Token
 * }
 * ```
 */
export declare class Parser {
    private onNewLine?;
    /** If true, space and sequence indicators count as indentation */
    private atNewLine;
    /** If true, next token is a scalar value */
    private atScalar;
    /** Current indentation level */
    private indent;
    /** Current offset since the start of parsing */
    offset: number;
    /** On the same line with a block map key */
    private onKeyLine;
    /** Top indicates the node that's currently being built */
    stack: Token[];
    /** The source of the current token, set in parse() */
    private source;
    /** The type of the current token, set in parse() */
    private type;
    /**
     * @param onNewLine - If defined, called separately with the start position of
     *   each new line (in `parse()`, including the start of input).
     */
    constructor(onNewLine?: (offset: number) => void);
    /**
     * Parse `source` as a YAML stream.
     * If `incomplete`, a part of the last line may be left as a buffer for the next call.
     *
     * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.
     *
     * @returns A generator of tokens representing each directive, document, and other structure.
     */
    parse(source: string, incomplete?: boolean): Generator<Token, void, any>;
    /**
     * Advance the parser by the `source` of one lexical token.
     */
    next(source: string): Generator<Token, void, any>;
    private lexer;
    /** Call at end of input to push out any remaining constructions */
    end(): Generator<Token, void, any>;
    private get sourceToken();
    private step;
    private peek;
    private pop;
    private stream;
    private document;
    private scalar;
    private blockScalar;
    private blockMap;
    private blockSequence;
    private flowCollection;
    private flowScalar;
    private startBlockValue;
    private atIndentedComment;
    private documentEnd;
    private lineEnd;
}
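For context, a minimal usage sketch of the Parser declared above, assuming the standard `yaml` (eemeli/yaml v2) package that ships this file and re-exports Parser and LineCounter from its top-level entry point; the sample input string is made up for illustration:

import { LineCounter, Parser } from 'yaml'

// Track line starts so token offsets can be mapped back to line/column positions.
const lineCounter = new LineCounter()
const parser = new Parser(lineCounter.addNewLine)

// Hypothetical YAML input, purely for illustration.
const src = 'key: value\nlist:\n  - a\n  - b\n'

// parse() yields one CST token per directive, document, or comment in the stream;
// errors are yielded as { type: 'error', message } tokens rather than thrown.
for (const token of parser.parse(src)) {
  console.log(token.type, lineCounter.linePos(token.offset))
}

For incremental input, the same tokens can be produced by feeding lexemes from a Lexer through next() and finishing with end(), as the second JSDoc example above shows.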