🎉 initiate project *astro_rewrite*
This commit is contained in:
		
							parent
							
								
									ffd4d5e86c
								
							
						
					
					
						commit
						2ba37bfbe3
					
				
					 8658 changed files with 2268794 additions and 2538 deletions
				
			
		
							
								
								
									
										7
									
								
								node_modules/micromark/dev/lib/initialize/content.d.ts
									
										
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										7
									
								
								node_modules/micromark/dev/lib/initialize/content.d.ts
									
										
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							|  | @ -0,0 +1,7 @@ | |||
| /** @type {InitialConstruct} */ | ||||
| export const content: InitialConstruct | ||||
| export type InitialConstruct = import('micromark-util-types').InitialConstruct | ||||
| export type Initializer = import('micromark-util-types').Initializer | ||||
| export type State = import('micromark-util-types').State | ||||
| export type Token = import('micromark-util-types').Token | ||||
| export type TokenizeContext = import('micromark-util-types').TokenizeContext | ||||
							
								
								
									
										97
									
								
								node_modules/micromark/dev/lib/initialize/content.js
									
										
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										97
									
								
								node_modules/micromark/dev/lib/initialize/content.js
									
										
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							|  | @ -0,0 +1,97 @@ | |||
| /** | ||||
|  * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct | ||||
|  * @typedef {import('micromark-util-types').Initializer} Initializer | ||||
|  * @typedef {import('micromark-util-types').State} State | ||||
|  * @typedef {import('micromark-util-types').Token} Token | ||||
|  * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext | ||||
|  */ | ||||
| 
 | ||||
| import {factorySpace} from 'micromark-factory-space' | ||||
| import {markdownLineEnding} from 'micromark-util-character' | ||||
| import {codes} from 'micromark-util-symbol/codes.js' | ||||
| import {constants} from 'micromark-util-symbol/constants.js' | ||||
| import {types} from 'micromark-util-symbol/types.js' | ||||
| import {ok as assert} from 'uvu/assert' | ||||
| 
 | ||||
| /** @type {InitialConstruct} */ | ||||
| export const content = {tokenize: initializeContent} | ||||
| 
 | ||||
| /** | ||||
|  * @this {TokenizeContext} | ||||
|  * @type {Initializer} | ||||
|  */ | ||||
| function initializeContent(effects) { | ||||
|   const contentStart = effects.attempt( | ||||
|     this.parser.constructs.contentInitial, | ||||
|     afterContentStartConstruct, | ||||
|     paragraphInitial | ||||
|   ) | ||||
|   /** @type {Token} */ | ||||
|   let previous | ||||
| 
 | ||||
|   return contentStart | ||||
| 
 | ||||
|   /** @type {State} */ | ||||
|   function afterContentStartConstruct(code) { | ||||
|     assert( | ||||
|       code === codes.eof || markdownLineEnding(code), | ||||
|       'expected eol or eof' | ||||
|     ) | ||||
| 
 | ||||
|     if (code === codes.eof) { | ||||
|       effects.consume(code) | ||||
|       return | ||||
|     } | ||||
| 
 | ||||
|     effects.enter(types.lineEnding) | ||||
|     effects.consume(code) | ||||
|     effects.exit(types.lineEnding) | ||||
|     return factorySpace(effects, contentStart, types.linePrefix) | ||||
|   } | ||||
| 
 | ||||
|   /** @type {State} */ | ||||
|   function paragraphInitial(code) { | ||||
|     assert( | ||||
|       code !== codes.eof && !markdownLineEnding(code), | ||||
|       'expected anything other than a line ending or EOF' | ||||
|     ) | ||||
|     effects.enter(types.paragraph) | ||||
|     return lineStart(code) | ||||
|   } | ||||
| 
 | ||||
|   /** @type {State} */ | ||||
|   function lineStart(code) { | ||||
|     const token = effects.enter(types.chunkText, { | ||||
|       contentType: constants.contentTypeText, | ||||
|       previous | ||||
|     }) | ||||
| 
 | ||||
|     if (previous) { | ||||
|       previous.next = token | ||||
|     } | ||||
| 
 | ||||
|     previous = token | ||||
| 
 | ||||
|     return data(code) | ||||
|   } | ||||
| 
 | ||||
|   /** @type {State} */ | ||||
|   function data(code) { | ||||
|     if (code === codes.eof) { | ||||
|       effects.exit(types.chunkText) | ||||
|       effects.exit(types.paragraph) | ||||
|       effects.consume(code) | ||||
|       return | ||||
|     } | ||||
| 
 | ||||
|     if (markdownLineEnding(code)) { | ||||
|       effects.consume(code) | ||||
|       effects.exit(types.chunkText) | ||||
|       return lineStart | ||||
|     } | ||||
| 
 | ||||
|     // Data.
 | ||||
|     effects.consume(code) | ||||
|     return data | ||||
|   } | ||||
| } | ||||
							
								
								
									
										12
									
								
								node_modules/micromark/dev/lib/initialize/document.d.ts
									
										
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										12
									
								
								node_modules/micromark/dev/lib/initialize/document.d.ts
									
										
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							|  | @ -0,0 +1,12 @@ | |||
| /** @type {InitialConstruct} */ | ||||
| export const document: InitialConstruct | ||||
| export type Construct = import('micromark-util-types').Construct | ||||
| export type ContainerState = import('micromark-util-types').ContainerState | ||||
| export type InitialConstruct = import('micromark-util-types').InitialConstruct | ||||
| export type Initializer = import('micromark-util-types').Initializer | ||||
| export type Point = import('micromark-util-types').Point | ||||
| export type State = import('micromark-util-types').State | ||||
| export type Token = import('micromark-util-types').Token | ||||
| export type TokenizeContext = import('micromark-util-types').TokenizeContext | ||||
| export type Tokenizer = import('micromark-util-types').Tokenizer | ||||
| export type StackItem = [Construct, ContainerState] | ||||
							
								
								
									
										435
									
								
								node_modules/micromark/dev/lib/initialize/document.js
									
										
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										435
									
								
								node_modules/micromark/dev/lib/initialize/document.js
									
										
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							|  | @ -0,0 +1,435 @@ | |||
| /** | ||||
|  * @typedef {import('micromark-util-types').Construct} Construct | ||||
|  * @typedef {import('micromark-util-types').ContainerState} ContainerState | ||||
|  * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct | ||||
|  * @typedef {import('micromark-util-types').Initializer} Initializer | ||||
|  * @typedef {import('micromark-util-types').Point} Point | ||||
|  * @typedef {import('micromark-util-types').State} State | ||||
|  * @typedef {import('micromark-util-types').Token} Token | ||||
|  * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext | ||||
|  * @typedef {import('micromark-util-types').Tokenizer} Tokenizer | ||||
|  */ | ||||
| 
 | ||||
| /** | ||||
|  * @typedef {[Construct, ContainerState]} StackItem | ||||
|  */ | ||||
| 
 | ||||
| import {factorySpace} from 'micromark-factory-space' | ||||
| import {markdownLineEnding} from 'micromark-util-character' | ||||
| import {splice} from 'micromark-util-chunked' | ||||
| import {codes} from 'micromark-util-symbol/codes.js' | ||||
| import {constants} from 'micromark-util-symbol/constants.js' | ||||
| import {types} from 'micromark-util-symbol/types.js' | ||||
| import {ok as assert} from 'uvu/assert' | ||||
| 
 | ||||
| /** @type {InitialConstruct} */ | ||||
| export const document = {tokenize: initializeDocument} | ||||
| 
 | ||||
| /** @type {Construct} */ | ||||
| const containerConstruct = {tokenize: tokenizeContainer} | ||||
| 
 | ||||
| /** | ||||
|  * @this {TokenizeContext} | ||||
|  * @type {Initializer} | ||||
|  */ | ||||
| function initializeDocument(effects) { | ||||
|   const self = this | ||||
|   /** @type {Array<StackItem>} */ | ||||
|   const stack = [] | ||||
|   let continued = 0 | ||||
|   /** @type {TokenizeContext | undefined} */ | ||||
|   let childFlow | ||||
|   /** @type {Token | undefined} */ | ||||
|   let childToken | ||||
|   /** @type {number} */ | ||||
|   let lineStartOffset | ||||
| 
 | ||||
|   return start | ||||
| 
 | ||||
|   /** @type {State} */ | ||||
|   function start(code) { | ||||
|     // First we iterate through the open blocks, starting with the root
 | ||||
|     // document, and descending through last children down to the last open
 | ||||
|     // block.
 | ||||
|     // Each block imposes a condition that the line must satisfy if the block is
 | ||||
|     // to remain open.
 | ||||
|     // For example, a block quote requires a `>` character.
 | ||||
|     // A paragraph requires a non-blank line.
 | ||||
|     // In this phase we may match all or just some of the open blocks.
 | ||||
|     // But we cannot close unmatched blocks yet, because we may have a lazy
 | ||||
|     // continuation line.
 | ||||
|     if (continued < stack.length) { | ||||
|       const item = stack[continued] | ||||
|       self.containerState = item[1] | ||||
|       assert( | ||||
|         item[0].continuation, | ||||
|         'expected `continuation` to be defined on container construct' | ||||
|       ) | ||||
|       return effects.attempt( | ||||
|         item[0].continuation, | ||||
|         documentContinue, | ||||
|         checkNewContainers | ||||
|       )(code) | ||||
|     } | ||||
| 
 | ||||
|     // Done.
 | ||||
|     return checkNewContainers(code) | ||||
|   } | ||||
| 
 | ||||
|   /** @type {State} */ | ||||
|   function documentContinue(code) { | ||||
|     assert( | ||||
|       self.containerState, | ||||
|       'expected `containerState` to be defined after continuation' | ||||
|     ) | ||||
| 
 | ||||
|     continued++ | ||||
| 
 | ||||
|     // Note: this field is called `_closeFlow` but it also closes containers.
 | ||||
|     // Perhaps a good idea to rename it but it’s already used in the wild by
 | ||||
|     // extensions.
 | ||||
|     if (self.containerState._closeFlow) { | ||||
|       self.containerState._closeFlow = undefined | ||||
| 
 | ||||
|       if (childFlow) { | ||||
|         closeFlow() | ||||
|       } | ||||
| 
 | ||||
|       // Note: this algorithm for moving events around is similar to the
 | ||||
|       // algorithm when dealing with lazy lines in `writeToChild`.
 | ||||
|       const indexBeforeExits = self.events.length | ||||
|       let indexBeforeFlow = indexBeforeExits | ||||
|       /** @type {Point | undefined} */ | ||||
|       let point | ||||
| 
 | ||||
|       // Find the flow chunk.
 | ||||
|       while (indexBeforeFlow--) { | ||||
|         if ( | ||||
|           self.events[indexBeforeFlow][0] === 'exit' && | ||||
|           self.events[indexBeforeFlow][1].type === types.chunkFlow | ||||
|         ) { | ||||
|           point = self.events[indexBeforeFlow][1].end | ||||
|           break | ||||
|         } | ||||
|       } | ||||
| 
 | ||||
|       assert(point, 'could not find previous flow chunk') | ||||
| 
 | ||||
|       exitContainers(continued) | ||||
| 
 | ||||
|       // Fix positions.
 | ||||
|       let index = indexBeforeExits | ||||
| 
 | ||||
|       while (index < self.events.length) { | ||||
|         self.events[index][1].end = Object.assign({}, point) | ||||
|         index++ | ||||
|       } | ||||
| 
 | ||||
|       // Inject the exits earlier (they’re still also at the end).
 | ||||
|       splice( | ||||
|         self.events, | ||||
|         indexBeforeFlow + 1, | ||||
|         0, | ||||
|         self.events.slice(indexBeforeExits) | ||||
|       ) | ||||
| 
 | ||||
|       // Discard the duplicate exits.
 | ||||
|       self.events.length = index | ||||
| 
 | ||||
|       return checkNewContainers(code) | ||||
|     } | ||||
| 
 | ||||
|     return start(code) | ||||
|   } | ||||
| 
 | ||||
|   /** @type {State} */ | ||||
|   function checkNewContainers(code) { | ||||
|     // Next, after consuming the continuation markers for existing blocks, we
 | ||||
|     // look for new block starts (e.g. `>` for a block quote).
 | ||||
|     // If we encounter a new block start, we close any blocks unmatched in
 | ||||
|     // step 1 before creating the new block as a child of the last matched
 | ||||
|     // block.
 | ||||
|     if (continued === stack.length) { | ||||
|       // No need to `check` whether there’s a container, of `exitContainers`
 | ||||
|       // would be moot.
 | ||||
|       // We can instead immediately `attempt` to parse one.
 | ||||
|       if (!childFlow) { | ||||
|         return documentContinued(code) | ||||
|       } | ||||
| 
 | ||||
|       // If we have concrete content, such as block HTML or fenced code,
 | ||||
|       // we can’t have containers “pierce” into them, so we can immediately
 | ||||
|       // start.
 | ||||
|       if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) { | ||||
|         return flowStart(code) | ||||
|       } | ||||
| 
 | ||||
|       // If we do have flow, it could still be a blank line,
 | ||||
|       // but we’d be interrupting it w/ a new container if there’s a current
 | ||||
|       // construct.
 | ||||
|       // To do: next major: remove `_gfmTableDynamicInterruptHack` (no longer
 | ||||
|       // needed in micromark-extension-gfm-table@1.0.6).
 | ||||
|       self.interrupt = Boolean( | ||||
|         childFlow.currentConstruct && !childFlow._gfmTableDynamicInterruptHack | ||||
|       ) | ||||
|     } | ||||
| 
 | ||||
|     // Check if there is a new container.
 | ||||
|     self.containerState = {} | ||||
|     return effects.check( | ||||
|       containerConstruct, | ||||
|       thereIsANewContainer, | ||||
|       thereIsNoNewContainer | ||||
|     )(code) | ||||
|   } | ||||
| 
 | ||||
|   /** @type {State} */ | ||||
|   function thereIsANewContainer(code) { | ||||
|     if (childFlow) closeFlow() | ||||
|     exitContainers(continued) | ||||
|     return documentContinued(code) | ||||
|   } | ||||
| 
 | ||||
|   /** @type {State} */ | ||||
|   function thereIsNoNewContainer(code) { | ||||
|     self.parser.lazy[self.now().line] = continued !== stack.length | ||||
|     lineStartOffset = self.now().offset | ||||
|     return flowStart(code) | ||||
|   } | ||||
| 
 | ||||
|   /** @type {State} */ | ||||
|   function documentContinued(code) { | ||||
|     // Try new containers.
 | ||||
|     self.containerState = {} | ||||
|     return effects.attempt( | ||||
|       containerConstruct, | ||||
|       containerContinue, | ||||
|       flowStart | ||||
|     )(code) | ||||
|   } | ||||
| 
 | ||||
|   /** @type {State} */ | ||||
|   function containerContinue(code) { | ||||
|     assert( | ||||
|       self.currentConstruct, | ||||
|       'expected `currentConstruct` to be defined on tokenizer' | ||||
|     ) | ||||
|     assert( | ||||
|       self.containerState, | ||||
|       'expected `containerState` to be defined on tokenizer' | ||||
|     ) | ||||
|     continued++ | ||||
|     stack.push([self.currentConstruct, self.containerState]) | ||||
|     // Try another.
 | ||||
|     return documentContinued(code) | ||||
|   } | ||||
| 
 | ||||
|   /** @type {State} */ | ||||
|   function flowStart(code) { | ||||
|     if (code === codes.eof) { | ||||
|       if (childFlow) closeFlow() | ||||
|       exitContainers(0) | ||||
|       effects.consume(code) | ||||
|       return | ||||
|     } | ||||
| 
 | ||||
|     childFlow = childFlow || self.parser.flow(self.now()) | ||||
|     effects.enter(types.chunkFlow, { | ||||
|       contentType: constants.contentTypeFlow, | ||||
|       previous: childToken, | ||||
|       _tokenizer: childFlow | ||||
|     }) | ||||
| 
 | ||||
|     return flowContinue(code) | ||||
|   } | ||||
| 
 | ||||
|   /** @type {State} */ | ||||
|   function flowContinue(code) { | ||||
|     if (code === codes.eof) { | ||||
|       writeToChild(effects.exit(types.chunkFlow), true) | ||||
|       exitContainers(0) | ||||
|       effects.consume(code) | ||||
|       return | ||||
|     } | ||||
| 
 | ||||
|     if (markdownLineEnding(code)) { | ||||
|       effects.consume(code) | ||||
|       writeToChild(effects.exit(types.chunkFlow)) | ||||
|       // Get ready for the next line.
 | ||||
|       continued = 0 | ||||
|       self.interrupt = undefined | ||||
|       return start | ||||
|     } | ||||
| 
 | ||||
|     effects.consume(code) | ||||
|     return flowContinue | ||||
|   } | ||||
| 
 | ||||
|   /** | ||||
|    * @param {Token} token | ||||
|    * @param {boolean | undefined} [eof] | ||||
|    * @returns {void} | ||||
|    */ | ||||
|   function writeToChild(token, eof) { | ||||
|     assert(childFlow, 'expected `childFlow` to be defined when continuing') | ||||
|     const stream = self.sliceStream(token) | ||||
|     if (eof) stream.push(null) | ||||
|     token.previous = childToken | ||||
|     if (childToken) childToken.next = token | ||||
|     childToken = token | ||||
|     childFlow.defineSkip(token.start) | ||||
|     childFlow.write(stream) | ||||
| 
 | ||||
|     // Alright, so we just added a lazy line:
 | ||||
|     //
 | ||||
|     // ```markdown
 | ||||
|     // > a
 | ||||
|     // b.
 | ||||
|     //
 | ||||
|     // Or:
 | ||||
|     //
 | ||||
|     // > ~~~c
 | ||||
|     // d
 | ||||
|     //
 | ||||
|     // Or:
 | ||||
|     //
 | ||||
|     // > | e |
 | ||||
|     // f
 | ||||
|     // ```
 | ||||
|     //
 | ||||
|     // The construct in the second example (fenced code) does not accept lazy
 | ||||
|     // lines, so it marked itself as done at the end of its first line, and
 | ||||
|     // then the content construct parses `d`.
 | ||||
|     // Most constructs in markdown match on the first line: if the first line
 | ||||
|     // forms a construct, a non-lazy line can’t “unmake” it.
 | ||||
|     //
 | ||||
|     // The construct in the third example is potentially a GFM table, and
 | ||||
|     // those are *weird*.
 | ||||
|     // It *could* be a table, from the first line, if the following line
 | ||||
|     // matches a condition.
 | ||||
|     // In this case, that second line is lazy, which “unmakes” the first line
 | ||||
|     // and turns the whole into one content block.
 | ||||
|     //
 | ||||
|     // We’ve now parsed the non-lazy and the lazy line, and can figure out
 | ||||
|     // whether the lazy line started a new flow block.
 | ||||
|     // If it did, we exit the current containers between the two flow blocks.
 | ||||
|     if (self.parser.lazy[token.start.line]) { | ||||
|       let index = childFlow.events.length | ||||
| 
 | ||||
|       while (index--) { | ||||
|         if ( | ||||
|           // The token starts before the line ending…
 | ||||
|           childFlow.events[index][1].start.offset < lineStartOffset && | ||||
|           // …and either is not ended yet…
 | ||||
|           (!childFlow.events[index][1].end || | ||||
|             // …or ends after it.
 | ||||
|             childFlow.events[index][1].end.offset > lineStartOffset) | ||||
|         ) { | ||||
|           // Exit: there’s still something open, which means it’s a lazy line
 | ||||
|           // part of something.
 | ||||
|           return | ||||
|         } | ||||
|       } | ||||
| 
 | ||||
|       // Note: this algorithm for moving events around is similar to the
 | ||||
|       // algorithm when closing flow in `documentContinue`.
 | ||||
|       const indexBeforeExits = self.events.length | ||||
|       let indexBeforeFlow = indexBeforeExits | ||||
|       /** @type {boolean | undefined} */ | ||||
|       let seen | ||||
|       /** @type {Point | undefined} */ | ||||
|       let point | ||||
| 
 | ||||
|       // Find the previous chunk (the one before the lazy line).
 | ||||
|       while (indexBeforeFlow--) { | ||||
|         if ( | ||||
|           self.events[indexBeforeFlow][0] === 'exit' && | ||||
|           self.events[indexBeforeFlow][1].type === types.chunkFlow | ||||
|         ) { | ||||
|           if (seen) { | ||||
|             point = self.events[indexBeforeFlow][1].end | ||||
|             break | ||||
|           } | ||||
| 
 | ||||
|           seen = true | ||||
|         } | ||||
|       } | ||||
| 
 | ||||
|       assert(point, 'could not find previous flow chunk') | ||||
| 
 | ||||
|       exitContainers(continued) | ||||
| 
 | ||||
|       // Fix positions.
 | ||||
|       index = indexBeforeExits | ||||
| 
 | ||||
|       while (index < self.events.length) { | ||||
|         self.events[index][1].end = Object.assign({}, point) | ||||
|         index++ | ||||
|       } | ||||
| 
 | ||||
|       // Inject the exits earlier (they’re still also at the end).
 | ||||
|       splice( | ||||
|         self.events, | ||||
|         indexBeforeFlow + 1, | ||||
|         0, | ||||
|         self.events.slice(indexBeforeExits) | ||||
|       ) | ||||
| 
 | ||||
|       // Discard the duplicate exits.
 | ||||
|       self.events.length = index | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|   /** | ||||
|    * @param {number} size | ||||
|    * @returns {void} | ||||
|    */ | ||||
|   function exitContainers(size) { | ||||
|     let index = stack.length | ||||
| 
 | ||||
|     // Exit open containers.
 | ||||
|     while (index-- > size) { | ||||
|       const entry = stack[index] | ||||
|       self.containerState = entry[1] | ||||
|       assert( | ||||
|         entry[0].exit, | ||||
|         'expected `exit` to be defined on container construct' | ||||
|       ) | ||||
|       entry[0].exit.call(self, effects) | ||||
|     } | ||||
| 
 | ||||
|     stack.length = size | ||||
|   } | ||||
| 
 | ||||
|   function closeFlow() { | ||||
|     assert( | ||||
|       self.containerState, | ||||
|       'expected `containerState` to be defined when closing flow' | ||||
|     ) | ||||
|     assert(childFlow, 'expected `childFlow` to be defined when closing it') | ||||
|     childFlow.write([codes.eof]) | ||||
|     childToken = undefined | ||||
|     childFlow = undefined | ||||
|     self.containerState._closeFlow = undefined | ||||
|   } | ||||
| } | ||||
| 
 | ||||
| /** | ||||
|  * @this {TokenizeContext} | ||||
|  * @type {Tokenizer} | ||||
|  */ | ||||
| function tokenizeContainer(effects, ok, nok) { | ||||
|   // Always populated by defaults.
 | ||||
|   assert( | ||||
|     this.parser.constructs.disable.null, | ||||
|     'expected `disable.null` to be populated' | ||||
|   ) | ||||
|   return factorySpace( | ||||
|     effects, | ||||
|     effects.attempt(this.parser.constructs.document, ok, nok), | ||||
|     types.linePrefix, | ||||
|     this.parser.constructs.disable.null.includes('codeIndented') | ||||
|       ? undefined | ||||
|       : constants.tabSize | ||||
|   ) | ||||
| } | ||||
							
								
								
									
										6
									
								
								node_modules/micromark/dev/lib/initialize/flow.d.ts
									
										
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								node_modules/micromark/dev/lib/initialize/flow.d.ts
									
										
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							|  | @ -0,0 +1,6 @@ | |||
| /** @type {InitialConstruct} */ | ||||
| export const flow: InitialConstruct | ||||
| export type InitialConstruct = import('micromark-util-types').InitialConstruct | ||||
| export type Initializer = import('micromark-util-types').Initializer | ||||
| export type State = import('micromark-util-types').State | ||||
| export type TokenizeContext = import('micromark-util-types').TokenizeContext | ||||
							
								
								
									
										83
									
								
								node_modules/micromark/dev/lib/initialize/flow.js
									
										
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										83
									
								
								node_modules/micromark/dev/lib/initialize/flow.js
									
										
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							|  | @ -0,0 +1,83 @@ | |||
| /** | ||||
|  * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct | ||||
|  * @typedef {import('micromark-util-types').Initializer} Initializer | ||||
|  * @typedef {import('micromark-util-types').State} State | ||||
|  * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext | ||||
|  */ | ||||
| 
 | ||||
| import {blankLine, content} from 'micromark-core-commonmark' | ||||
| import {factorySpace} from 'micromark-factory-space' | ||||
| import {markdownLineEnding} from 'micromark-util-character' | ||||
| import {codes} from 'micromark-util-symbol/codes.js' | ||||
| import {types} from 'micromark-util-symbol/types.js' | ||||
| import {ok as assert} from 'uvu/assert' | ||||
| 
 | ||||
| /** @type {InitialConstruct} */ | ||||
| export const flow = {tokenize: initializeFlow} | ||||
| 
 | ||||
| /** | ||||
|  * @this {TokenizeContext} | ||||
|  * @type {Initializer} | ||||
|  */ | ||||
| function initializeFlow(effects) { | ||||
|   const self = this | ||||
|   const initial = effects.attempt( | ||||
|     // Try to parse a blank line.
 | ||||
|     blankLine, | ||||
|     atBlankEnding, | ||||
|     // Try to parse initial flow (essentially, only code).
 | ||||
|     effects.attempt( | ||||
|       this.parser.constructs.flowInitial, | ||||
|       afterConstruct, | ||||
|       factorySpace( | ||||
|         effects, | ||||
|         effects.attempt( | ||||
|           this.parser.constructs.flow, | ||||
|           afterConstruct, | ||||
|           effects.attempt(content, afterConstruct) | ||||
|         ), | ||||
|         types.linePrefix | ||||
|       ) | ||||
|     ) | ||||
|   ) | ||||
| 
 | ||||
|   return initial | ||||
| 
 | ||||
|   /** @type {State} */ | ||||
|   function atBlankEnding(code) { | ||||
|     assert( | ||||
|       code === codes.eof || markdownLineEnding(code), | ||||
|       'expected eol or eof' | ||||
|     ) | ||||
| 
 | ||||
|     if (code === codes.eof) { | ||||
|       effects.consume(code) | ||||
|       return | ||||
|     } | ||||
| 
 | ||||
|     effects.enter(types.lineEndingBlank) | ||||
|     effects.consume(code) | ||||
|     effects.exit(types.lineEndingBlank) | ||||
|     self.currentConstruct = undefined | ||||
|     return initial | ||||
|   } | ||||
| 
 | ||||
|   /** @type {State} */ | ||||
|   function afterConstruct(code) { | ||||
|     assert( | ||||
|       code === codes.eof || markdownLineEnding(code), | ||||
|       'expected eol or eof' | ||||
|     ) | ||||
| 
 | ||||
|     if (code === codes.eof) { | ||||
|       effects.consume(code) | ||||
|       return | ||||
|     } | ||||
| 
 | ||||
|     effects.enter(types.lineEnding) | ||||
|     effects.consume(code) | ||||
|     effects.exit(types.lineEnding) | ||||
|     self.currentConstruct = undefined | ||||
|     return initial | ||||
|   } | ||||
| } | ||||
							
								
								
									
										11
									
								
								node_modules/micromark/dev/lib/initialize/text.d.ts
									
										
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										11
									
								
								node_modules/micromark/dev/lib/initialize/text.d.ts
									
										
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							|  | @ -0,0 +1,11 @@ | |||
| export namespace resolver { | ||||
|   const resolveAll: import('micromark-util-types').Resolver | ||||
| } | ||||
| export const string: import('micromark-util-types').InitialConstruct | ||||
| export const text: import('micromark-util-types').InitialConstruct | ||||
| export type Code = import('micromark-util-types').Code | ||||
| export type InitialConstruct = import('micromark-util-types').InitialConstruct | ||||
| export type Initializer = import('micromark-util-types').Initializer | ||||
| export type Resolver = import('micromark-util-types').Resolver | ||||
| export type State = import('micromark-util-types').State | ||||
| export type TokenizeContext = import('micromark-util-types').TokenizeContext | ||||
							
								
								
									
										232
									
								
								node_modules/micromark/dev/lib/initialize/text.js
									
										
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										232
									
								
								node_modules/micromark/dev/lib/initialize/text.js
									
										
									
										generated
									
									
										vendored
									
									
										Normal file
									
								
							|  | @ -0,0 +1,232 @@ | |||
| /** | ||||
|  * @typedef {import('micromark-util-types').Code} Code | ||||
|  * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct | ||||
|  * @typedef {import('micromark-util-types').Initializer} Initializer | ||||
|  * @typedef {import('micromark-util-types').Resolver} Resolver | ||||
|  * @typedef {import('micromark-util-types').State} State | ||||
|  * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext | ||||
|  */ | ||||
| 
 | ||||
| import {codes} from 'micromark-util-symbol/codes.js' | ||||
| import {constants} from 'micromark-util-symbol/constants.js' | ||||
| import {types} from 'micromark-util-symbol/types.js' | ||||
| import {ok as assert} from 'uvu/assert' | ||||
| 
 | ||||
| export const resolver = {resolveAll: createResolver()} | ||||
| export const string = initializeFactory('string') | ||||
| export const text = initializeFactory('text') | ||||
| 
 | ||||
/**
 * Create the initial construct for the `string` or `text` content type.
 *
 * Both share the same tokenizer; for `text` the resolver additionally splits
 * trailing line whitespace off of `data` tokens.
 *
 * @param {'string' | 'text'} field
 * @returns {InitialConstruct}
 */
function initializeFactory(field) {
  return {
    tokenize: initializeText,
    resolveAll: createResolver(
      field === 'text' ? resolveAllLineSuffixes : undefined
    )
  }

  /**
   * Tokenize `string`/`text`: try the constructs registered for the current
   * code, and fall back to plain `data` when none apply.
   *
   * @this {TokenizeContext}
   * @type {Initializer}
   */
  function initializeText(effects) {
    const context = this
    const constructs = this.parser.constructs[field]
    // State that attempts the registered constructs; on failure, `notText`
    // turns the code into data.
    const tryConstructs = effects.attempt(constructs, start, notText)

    return start

    /** @type {State} */
    function start(code) {
      if (atBreak(code)) return tryConstructs(code)
      return notText(code)
    }

    /** @type {State} */
    function notText(code) {
      // Anything that is not EOF becomes (part of) a `data` token.
      if (code !== codes.eof) {
        effects.enter(types.data)
        effects.consume(code)
        return data
      }

      effects.consume(code)
    }

    /** @type {State} */
    function data(code) {
      // Keep consuming into the current `data` token until a code shows up
      // that could start a construct (or the stream ends).
      if (!atBreak(code)) {
        effects.consume(code)
        return data
      }

      effects.exit(types.data)
      return tryConstructs(code)
    }

    /**
     * Whether `code` is EOF or has a registered construct whose `previous`
     * check (if any) accepts the previous code.
     *
     * @param {Code} code
     * @returns {boolean}
     */
    function atBreak(code) {
      if (code === codes.eof) {
        return true
      }

      const candidates = constructs[code]

      if (candidates) {
        // Always populated by defaults.
        assert(
          Array.isArray(candidates),
          'expected `disable.null` to be populated'
        )

        for (const construct of candidates) {
          if (
            !construct.previous ||
            construct.previous.call(context, context.previous)
          ) {
            return true
          }
        }
      }

      return false
    }
  }
}
| 
 | ||||
| /** | ||||
|  * @param {Resolver | undefined} [extraResolver] | ||||
|  * @returns {Resolver} | ||||
|  */ | ||||
| function createResolver(extraResolver) { | ||||
|   return resolveAllText | ||||
| 
 | ||||
|   /** @type {Resolver} */ | ||||
|   function resolveAllText(events, context) { | ||||
|     let index = -1 | ||||
|     /** @type {number | undefined} */ | ||||
|     let enter | ||||
| 
 | ||||
|     // A rather boring computation (to merge adjacent `data` events) which
 | ||||
|     // improves mm performance by 29%.
 | ||||
|     while (++index <= events.length) { | ||||
|       if (enter === undefined) { | ||||
|         if (events[index] && events[index][1].type === types.data) { | ||||
|           enter = index | ||||
|           index++ | ||||
|         } | ||||
|       } else if (!events[index] || events[index][1].type !== types.data) { | ||||
|         // Don’t do anything if there is one data token.
 | ||||
|         if (index !== enter + 2) { | ||||
|           events[enter][1].end = events[index - 1][1].end | ||||
|           events.splice(enter + 2, index - enter - 2) | ||||
|           index = enter + 2 | ||||
|         } | ||||
| 
 | ||||
|         enter = undefined | ||||
|       } | ||||
|     } | ||||
| 
 | ||||
|     return extraResolver ? extraResolver(events, context) : events | ||||
|   } | ||||
| } | ||||
| 
 | ||||
/**
 * A rather ugly set of instructions which again looks at chunks in the input
 * stream.
 * The reason to do this here is that it is *much* faster to parse in reverse.
 * And that we can’t hook into `null` to split the line suffix before an EOF.
 * To do: figure out if we can make this into a clean utility, or even in core.
 * As it will be useful for GFMs literal autolink extension (and maybe even
 * tables?)
 *
 * For every `data` token that sits right before a line ending (or before the
 * end of the event list), measure the trailing whitespace in the backing
 * stream chunks and split it off into its own `lineSuffix` or
 * `hardBreakTrailing` token.
 *
 * @type {Resolver}
 */
function resolveAllLineSuffixes(events, context) {
  let eventIndex = 0 // Skip first.

  while (++eventIndex <= events.length) {
    // Only act on a `data` token followed by a line ending, or a `data`
    // token that is the very last token.
    if (
      (eventIndex === events.length ||
        events[eventIndex][1].type === types.lineEnding) &&
      events[eventIndex - 1][1].type === types.data
    ) {
      const data = events[eventIndex - 1][1]
      // Raw chunks backing the token: strings and character codes.
      const chunks = context.sliceStream(data)
      let index = chunks.length
      let bufferIndex = -1
      // Number of trailing whitespace characters counted so far.
      let size = 0
      /** @type {boolean | undefined} */
      let tabs

      // Scan the chunks from the end towards the start, counting trailing
      // spaces and tabs; stop at the first non-whitespace character.
      while (index--) {
        const chunk = chunks[index]

        if (typeof chunk === 'string') {
          bufferIndex = chunk.length

          // Count trailing spaces inside this string chunk.
          while (chunk.charCodeAt(bufferIndex - 1) === codes.space) {
            size++
            bufferIndex--
          }

          // Non-space content remains in this chunk: the suffix starts here.
          if (bufferIndex) break
          bufferIndex = -1
        }
        // Number
        else if (chunk === codes.horizontalTab) {
          tabs = true
          size++
        } else if (chunk === codes.virtualSpace) {
          // Empty
        } else {
          // Replacement character, exit.
          index++
          break
        }
      }

      if (size) {
        const token = {
          type:
            // A trailing hard break requires at least
            // `hardBreakPrefixSizeMin` spaces, no tabs, and a following line
            // ending (not the end of the stream); anything else is a plain
            // line suffix.
            eventIndex === events.length ||
            tabs ||
            size < constants.hardBreakPrefixSizeMin
              ? types.lineSuffix
              : types.hardBreakTrailing,
          start: {
            line: data.end.line,
            column: data.end.column - size,
            offset: data.end.offset - size,
            _index: data.start._index + index,
            // When the whole scan consumed chunk 0, the offset is relative
            // to where the data token itself started in that chunk.
            _bufferIndex: index
              ? bufferIndex
              : data.start._bufferIndex + bufferIndex
          },
          end: Object.assign({}, data.end)
        }

        // Shrink the data token so it ends where the suffix begins.
        data.end = Object.assign({}, token.start)

        if (data.start.offset === data.end.offset) {
          // The data token was all whitespace: turn it into the suffix token.
          Object.assign(data, token)
        } else {
          // Otherwise insert the new suffix token right after the data token.
          events.splice(
            eventIndex,
            0,
            ['enter', token, context],
            ['exit', token, context]
          )
          eventIndex += 2
        }
      }

      eventIndex++
    }
  }

  return events
}
		Loading…
	
	Add table
		Add a link
		
	
		Reference in a new issue
	
	 sindrekjelsrud
						sindrekjelsrud