export default subtokenize

import assert from 'assert'
import codes from '../character/codes.mjs'
import assign from '../constant/assign.mjs'
import types from '../constant/types.mjs'
import chunkedSplice from './chunked-splice.mjs'
import shallow from './shallow.mjs'
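
// Expand content chunks (events whose token has a `contentType`) with their
// own tokenizer and splice the resulting events back into `events`.
// Returns `true` when nothing was left to expand, `false` when at least one
// chunk was expanded and the events should be walked again.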
function subtokenize(events) {
  var jumps = {}
  var index = -1
  var event
  var lineIndex
  var otherIndex
  var otherEvent
  var parameters
  var subevents
  var more

  while (++index < events.length) {
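    // `jumps` maps the start index of a range that was already replaced by
    // subtokenized events to its end index: skip past such ranges instead of
    // walking into them again.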
    while (index in jumps) {
      index = jumps[index]
    }

    event = events[index]

    // Add a hook for the GFM tasklist extension, which needs to know if text
    // is in the first content of a list item.
    if (
      index &&
      event[1].type === types.chunkFlow &&
      events[index - 1][1].type === types.listItemPrefix
    ) {
      subevents = event[1]._tokenizer.events
      otherIndex = 0

      if (
        otherIndex < subevents.length &&
        subevents[otherIndex][1].type === types.lineEndingBlank
      ) {
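        // Skip past an initial blank line ending (its enter and exit events).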
        otherIndex += 2
      }

      if (
        otherIndex < subevents.length &&
        subevents[otherIndex][1].type === types.content
      ) {
        while (++otherIndex < subevents.length) {
          if (subevents[otherIndex][1].type === types.content) {
            break
          }

          if (subevents[otherIndex][1].type === types.chunkText) {
            subevents[otherIndex][1].isInFirstContentOfListItem = true
            otherIndex++
          }
        }
      }
    }

    // Enter.
    if (event[0] === 'enter') {
      if (event[1].contentType) {
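        // This chunk carries embedded content: expand it in place with
        // `subcontent`, record the spliced range, and continue after it.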
        assign(jumps, subcontent(events, index))
        index = jumps[index]
        more = true
      }
    }
    // Exit.
    else if (event[1]._container || event[1]._movePreviousLineEndings) {
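      // Line endings tokenized right before this exit actually belong after
      // it: walk back to find them so the exit can be moved in front of them.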
      otherIndex = index
      lineIndex = undefined

      while (otherIndex--) {
        otherEvent = events[otherIndex]

        if (
          otherEvent[1].type === types.lineEnding ||
          otherEvent[1].type === types.lineEndingBlank
        ) {
          if (otherEvent[0] === 'enter') {
            if (lineIndex) {
              events[lineIndex][1].type = types.lineEndingBlank
            }

            otherEvent[1].type = types.lineEnding
            lineIndex = otherIndex
          }
        } else {
          break
        }
      }

      if (lineIndex) {
        // Fix position.
        event[1].end = shallow(events[lineIndex][1].start)

        // Switch container exit w/ line endings.
        parameters = events.slice(lineIndex, index)
        parameters.unshift(event)
        chunkedSplice(events, lineIndex, index - lineIndex + 1, parameters)
      }
    }
  }

  return !more
}
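
// Tokenize the linked chunks that make up one content token with a dedicated
// tokenizer, splice the resulting events into `events` in place of the chunk
// events, and return a map of spliced ranges (start index -> end index) for
// `subtokenize` to skip over.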
function subcontent(events, eventIndex) {
  var token = events[eventIndex][1]
  var context = events[eventIndex][2]
  var startPosition = eventIndex - 1
  var startPositions = []
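  // Reuse the tokenizer that already handled part of this content, or create
  // a fresh one for its content type.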
  var tokenizer =
    token._tokenizer || context.parser[token.contentType](token.start)
  var childEvents = tokenizer.events
  var jumps = []
  var gaps = {}
  var stream
  var previous
  var index
  var entered
  var end
  var adjust

  // Loop forward through the linked tokens to pass them in order to the
  // subtokenizer.
  while (token) {
    // Find the position of the event for this token.
    while (events[++startPosition][1] !== token) {
      // Empty.
    }

    startPositions.push(startPosition)

    if (!token._tokenizer) {
      stream = context.sliceStream(token)

      if (!token.next) {
        stream.push(codes.eof)
      }

      if (previous) {
        tokenizer.defineSkip(token.start)
      }

      if (token.isInFirstContentOfListItem) {
        tokenizer._gfmTasklistFirstContentOfListItem = true
      }

      tokenizer.write(stream)

      if (token.isInFirstContentOfListItem) {
        tokenizer._gfmTasklistFirstContentOfListItem = undefined
      }
    }

    // Unravel the next token.
    previous = token
    token = token.next
  }

  // Now, loop back through all events (and linked tokens), to figure out which
  // parts belong where.
  token = previous
  index = childEvents.length
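
  // Walk the child events backwards, cutting them wherever a void token (such
  // as a line ending) crosses a line break, so that each linked chunk can be
  // replaced by the slice of child events that covers it.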
  while (index--) {
    // Make sure we’ve at least seen something (final eol is part of the last
    // token).
    if (childEvents[index][0] === 'enter') {
      entered = true
    } else if (
      // Find a void token that includes a break.
      entered &&
      childEvents[index][1].type === childEvents[index - 1][1].type &&
      childEvents[index][1].start.line !== childEvents[index][1].end.line
    ) {
      add(childEvents.slice(index + 1, end))
      assert(token.previous, 'expected a previous token')
      // Help GC.
      token._tokenizer = token.next = undefined
      token = token.previous
      end = index + 1
    }
  }

  assert(!token.previous, 'expected no previous token')
  // Help GC.
  tokenizer.events = token._tokenizer = token.next = undefined

  // Do head:
  add(childEvents.slice(0, end))

  index = -1
  adjust = 0
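
  // Every splice changed the length of `events`, so translate the recorded
  // ranges into `gaps` on the final indices, shifting later ranges by
  // `adjust`.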
  while (++index < jumps.length) {
    gaps[adjust + jumps[index][0]] = adjust + jumps[index][1]
    adjust += jumps[index][1] - jumps[index][0] - 1
  }

  return gaps
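
  // Replace the next (working backwards) enter/exit chunk pair in `events`
  // with the child events tokenized for it, and remember the spliced range.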
  function add(slice) {
    var start = startPositions.pop()
    jumps.unshift([start, start + slice.length - 1])
    chunkedSplice(events, start, 2, slice)
  }
}
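
// A minimal sketch of how this utility is typically driven; the `postprocess`
// name and the calling loop are illustrative assumptions, not part of this
// file:
//
//   import subtokenize from './subtokenize.mjs'
//
//   function postprocess(events) {
//     // Keep going while `subtokenize` reports that something was expanded,
//     // because expanded chunks may themselves contain new content chunks.
//     while (!subtokenize(events)) {
//       // Empty.
//     }
//
//     return events
//   }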